ngram
listlengths
0
82k
[ "coding:utf-8 -*- from uiObject import uiObject # main入口 if __name__", "uiObject # main入口 if __name__ == '__main__': ui = uiObject()", "-*- coding:utf-8 -*- from uiObject import uiObject # main入口 if", "uiObject import uiObject # main入口 if __name__ == '__main__': ui", "# -*- coding:utf-8 -*- from uiObject import uiObject # main入口", "# main入口 if __name__ == '__main__': ui = uiObject() ui.ui_process()", "-*- from uiObject import uiObject # main入口 if __name__ ==", "<reponame>shengqiangzhang/examples-of-web-crawlers # -*- coding:utf-8 -*- from uiObject import uiObject #", "from uiObject import uiObject # main入口 if __name__ == '__main__':", "import uiObject # main入口 if __name__ == '__main__': ui =" ]
[ "class Photo(models.Model): category = models.ForeignKey( Category, on_delete=models.SET_NULL, null=True, blank=True )", "image = CloudinaryField('image', default='') description = models.TextField() def __str__(self): return", "self.name class Photo(models.Model): category = models.ForeignKey( Category, on_delete=models.SET_NULL, null=True, blank=True", "from cloudinary.models import CloudinaryField # Create your models here. class", "Category, on_delete=models.SET_NULL, null=True, blank=True ) image = CloudinaryField('image', default='') description", ") image = CloudinaryField('image', default='') description = models.TextField() def __str__(self):", "Category(models.Model): name = models.CharField( max_length=200, null=False, blank=False ) def __str__(self):", "null=True, blank=True ) image = CloudinaryField('image', default='') description = models.TextField()", "blank=True ) image = CloudinaryField('image', default='') description = models.TextField() def", "= models.CharField( max_length=200, null=False, blank=False ) def __str__(self): return self.name", "models from cloudinary.models import CloudinaryField # Create your models here.", "from django.db import models from cloudinary.models import CloudinaryField # Create", ") def __str__(self): return self.name class Photo(models.Model): category = models.ForeignKey(", "name = models.CharField( max_length=200, null=False, blank=False ) def __str__(self): return", "import models from cloudinary.models import CloudinaryField # Create your models", "your models here. class Category(models.Model): name = models.CharField( max_length=200, null=False,", "models.ForeignKey( Category, on_delete=models.SET_NULL, null=True, blank=True ) image = CloudinaryField('image', default='')", "# Create your models here. class Category(models.Model): name = models.CharField(", "cloudinary.models import CloudinaryField # Create your models here. 
class Category(models.Model):", "null=False, blank=False ) def __str__(self): return self.name class Photo(models.Model): category", "Photo(models.Model): category = models.ForeignKey( Category, on_delete=models.SET_NULL, null=True, blank=True ) image", "max_length=200, null=False, blank=False ) def __str__(self): return self.name class Photo(models.Model):", "import CloudinaryField # Create your models here. class Category(models.Model): name", "def __str__(self): return self.name class Photo(models.Model): category = models.ForeignKey( Category,", "= models.ForeignKey( Category, on_delete=models.SET_NULL, null=True, blank=True ) image = CloudinaryField('image',", "CloudinaryField # Create your models here. class Category(models.Model): name =", "models.CharField( max_length=200, null=False, blank=False ) def __str__(self): return self.name class", "django.db import models from cloudinary.models import CloudinaryField # Create your", "on_delete=models.SET_NULL, null=True, blank=True ) image = CloudinaryField('image', default='') description =", "__str__(self): return self.name class Photo(models.Model): category = models.ForeignKey( Category, on_delete=models.SET_NULL,", "here. class Category(models.Model): name = models.CharField( max_length=200, null=False, blank=False )", "Create your models here. class Category(models.Model): name = models.CharField( max_length=200,", "class Category(models.Model): name = models.CharField( max_length=200, null=False, blank=False ) def", "= CloudinaryField('image', default='') description = models.TextField() def __str__(self): return self.description", "models here. 
class Category(models.Model): name = models.CharField( max_length=200, null=False, blank=False", "return self.name class Photo(models.Model): category = models.ForeignKey( Category, on_delete=models.SET_NULL, null=True,", "blank=False ) def __str__(self): return self.name class Photo(models.Model): category =", "category = models.ForeignKey( Category, on_delete=models.SET_NULL, null=True, blank=True ) image =" ]
[ "print(\"With status code {}\".format(status_code)) json_data = json.loads(response.text) return json_data, status_code", "`get_request` to make HTTP GET requests # e.g., response =", "else: response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs) except: print(\"Network Error\")", "print(\"Post to url: {} \".format(url)) print(kwargs) print(json_payload) response = requests.post(url,", "print(\"GET from {}\".format(url)) print(kwargs) try: if api_key is not None:", "json_result: result = json_result[\"message\"] else: result = \"Unknown error\" return", "zip=dealer.get(\"zip\")) # dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), # id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"),", "object for dealer in dealers: # dlr_data = dealer[\"doc\"] #", "json_result: # Get the row list in JSON as reviews", "import CarDealer, DealerReview from requests.auth import HTTPBasicAuth import logging logger", "a URL parameter json_result, status_code = get_request(url, None, dealerId=dealerId) if", "#sentiment = analyze_review_sentiments(review[\"review\"]) review_obj = DealerReview( id=review.get(\"id\"), name=review.get(\"name\"), review=review.get(\"review\"), purchase=review.get(\"purchase\"),", "import logging logger = logging.getLogger(__name__) # Create a `get_request` to", "city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) #", "JSON as reviews reviews = json_result[\"body\"][\"data\"] # For each review", "lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), state=dealer.get(\"state\"), st=dealer.get(\"st\"), zip=dealer.get(\"zip\")) # dealer_obj = CarDealer(address=dlr_data.get(\"address\"),", 
"response.status_code print(\"With status code {}\".format(status_code)) json_data = json.loads(response.text) return json_data,", "to get dealers from a cloud function def get_dealers_from_cf(url, **kwargs):", "review=review.get(\"review\"), purchase=review.get(\"purchase\"), car_make=review.get(\"car_make\", None), car_model=review.get(\"car_model\", None), car_year=review.get(\"car_year\", None), purchase_date=review.get(\"purchase_date\", None))", "NLU and analyze text # def analyze_review_sentiments(text): # - Call", "return json_data, status_code # Create a get_dealers_from_cf method to get", "dealers = json_result[\"rows\"] # For each dealer object for dealer", "# For each dealer object for dealer in dealers: #", "api_key)) else: response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs) except: print(\"Network", "dlr_data[\"address\"]) if dlr_data.get('address'): # Create a CarDealer object with values", "#print('ADDRESS', dlr_data[\"address\"]) if dlr_data.get('address'): # Create a CarDealer object with", "json=payload) def post_request(url, json_payload, **kwargs): print(\"Post to url: {} \".format(url))", "params=kwargs, json=json_payload) status_code = response.status_code print(\"With status code {}\".format(status_code)) json_data", "car_year=review.get(\"car_year\", None), purchase_date=review.get(\"purchase_date\", None)) info.append(review_obj) elif json_result: result = json_result[\"message\"]", "list in JSON as dealers dealers = json_result[\"rows\"] for dealer", "purchase_date=review.get(\"purchase_date\", None)) info.append(review_obj) elif json_result: result = json_result[\"message\"] else: result", "CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), # id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), # short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"),", "result # Create an 
`analyze_review_sentiments` method to call Watson NLU", "def get_dealers_by_state (url, state): info = [] result = \"ok\"", "get_request(url, None, state=state) if status_code == 200 and json_result: #", "None result = \"ok\" json_result, status_code = get_request(url, None, dealerId=dealerId)", "get dealers from a cloud function def get_dealers_from_cf(url, **kwargs): info", "`doc` object info = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"),", "Get the row list in JSON as dealers dealers =", "# Get the row list in JSON as dealers dealers", "short_name=dealer.get(\"short_name\"), st=dealer.get(\"st\"), state=dealer.get(\"state\"), zip=dealer.get(\"zip\")) # info = CarDealer(address=dealer[\"address\"], city=dealer[\"city\"], full_name=dealer[\"full_name\"],", "result def get_dealers_by_state (url, state): info = [] result =", "to make HTTP POST requests # e.g., response = requests.post(url,", "\"ok\" json_result, status_code = get_request(url, None, dealerId=dealerId) # json_result, status_code", "Call get_request() with specified arguments logger.info(\"Get Dealers from CF Called!\")", "HTTP POST requests # e.g., response = requests.post(url, params=kwargs, json=payload)", "dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), # id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), #", "def get_dealers_from_cf(url, **kwargs): info = [] result = \"ok\" #", "Called!\") json_result, status_code = get_request(url, None) if status_code == 200", "None), purchase_date=review.get(\"purchase_date\", None)) info.append(review_obj) elif json_result: result = json_result[\"message\"] else:", "- Call get_request() with specified arguments # - Get the", "in `doc` object dealer_obj = CarDealer(address=dealer.get(\"address\"), 
city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"),", "# e.g., response = requests.get(url, params=params, headers={'Content-Type': 'application/json'}, # auth=HTTPBasicAuth('apikey',", "info, result # Create an `analyze_review_sentiments` method to call Watson", "from a cloud function def get_dealers_from_cf(url, **kwargs): info = []", "get_request with a URL parameter info = None result =", "result def get_dealer_by_id(url, dealerId): # Call get_request with a URL", "= None result = \"ok\" json_result, status_code = get_request(url, None,", "POST requests # e.g., response = requests.post(url, params=kwargs, json=payload) def", "logger.info(len(dealers)) for dealer in dealers: dlr_data = dealer['doc'] #print('ADDRESS', dlr_data[\"address\"])", "try: if api_key is not None: response = requests.get(url, headers={'Content-Type':", "dealerId): info = [] result = \"ok\" # Call get_request", "(dealerId == review.get(\"dealership\")): # Create a DealerReview object with values", "= requests.post(url, headers={'Content-Type': 'application/json'}, params=kwargs, json=json_payload) status_code = response.status_code print(\"With", "state=dlr_data.get(\"state\"), # st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) info.append(dealer_obj) elif json_result: result = json_result[\"message\"]", "response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs, auth=HTTPBasicAuth('apikey', api_key)) else: response", "= requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs) except: print(\"Network Error\") status_code =", "zip=dlr_data.get(\"zip\")) # dealer_obj = CarDealer(address=dealer[\"doc\"][\"address\"], city=dealer[\"doc\"][\"city\"], full_name=dealer[\"doc\"][\"full_name\"], # id=dealer[\"doc\"][\"id\"], lat=dealer[\"doc\"][\"lat\"],", "DealerReview object with values in object #sentiment = analyze_review_sentiments(review[\"review\"]) review_obj", "dealers from a 
cloud function def get_dealers_from_cf(url, **kwargs): info =", "- Call get_request() with specified arguments logger.info(\"Get Dealers from CF", "state=dealer.get(\"state\"), zip=dealer.get(\"zip\")) # info = CarDealer(address=dealer[\"address\"], city=dealer[\"city\"], full_name=dealer[\"full_name\"], # id=dealer[\"id\"],", "dealerId=dealerId) # json_result, status_code = get_request(url, None, dealerId=dealerId) if status_code", "# Create a `post_request` to make HTTP POST requests #", "city=dealer[\"doc\"][\"city\"], full_name=dealer[\"doc\"][\"full_name\"], # id=dealer[\"doc\"][\"id\"], lat=dealer[\"doc\"][\"lat\"], long=dealer[\"doc\"][\"long\"], # short_name=dealer[\"doc\"][\"short_name\"], # st=dealer[\"doc\"][\"st\"],", "parameter json_result, status_code = get_request(url, None, dealerId=dealerId) if status_code ==", ".models import CarDealer, DealerReview from requests.auth import HTTPBasicAuth import logging", "import related models here from .models import CarDealer, DealerReview from", "print(json_payload) response = requests.post(url, headers={'Content-Type': 'application/json'}, params=kwargs, json=json_payload) status_code =", "object with values in `doc` object dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"),", "id=dealer[\"id\"], lat=dealer[\"lat\"], long=dealer[\"long\"], # short_name=dealer[\"short_name\"], state=dealer[\"state\"], # st=dealer[\"st\"], zip=dealer[\"zip\"]) elif", "analyze_review_sentiments(text): # - Call get_request() with specified arguments # -", "**kwargs): print(\"Post to url: {} \".format(url)) print(kwargs) print(json_payload) response =", "logger = logging.getLogger(__name__) # Create a `get_request` to make HTTP", "lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), # short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), # st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) info.append(dealer_obj) elif", "error\" return info, result def 
get_dealer_reviews_from_cf (url, dealerId): info =", "[] result = \"ok\" # - Call get_request() with specified", "None)) info.append(review_obj) elif json_result: result = json_result[\"message\"] else: result =", "status_code == 200 and json_result: dealers = json_result['rows'] logger.info(len(dealers)) for", "review_obj = DealerReview( id=review.get(\"id\"), name=review.get(\"name\"), review=review.get(\"review\"), purchase=review.get(\"purchase\"), car_make=review.get(\"car_make\", None), car_model=review.get(\"car_model\",", "URL parameter info = None result = \"ok\" json_result, status_code", "[] result = \"ok\" # Call get_request with a URL", "= \"Unknown error\" return info, result def get_dealer_reviews_from_cf (url, dealerId):", "row list in JSON as dealers dealers = json_result[\"rows\"] #", "to make HTTP GET requests # e.g., response = requests.get(url,", "JSON as dealers dealers = json_result[\"rows\"] for dealer in dealers:", "json_data, status_code # Create a get_dealers_from_cf method to get dealers", "in dealers: # dlr_data = dealer[\"doc\"] # Create a CarDealer", "dlr_data = dealer[\"doc\"] # Create a CarDealer object with values", "# dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), # id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"),", "state=dealer[\"doc\"][\"state\"], zip=dealer[\"doc\"][\"zip\"]) info.append(dealer_obj) elif json_result: result = json_result[\"message\"] else: result", "else: result = \"Unknown error\" return info, result def get_dealer_reviews_from_cf", "return info, result def get_dealer_reviews_from_cf (url, dealerId): info = []", "id=dealer[\"doc\"][\"id\"], lat=dealer[\"doc\"][\"lat\"], long=dealer[\"doc\"][\"long\"], # short_name=dealer[\"doc\"][\"short_name\"], # st=dealer[\"doc\"][\"st\"], state=dealer[\"doc\"][\"state\"], zip=dealer[\"doc\"][\"zip\"]) info.append(dealer_obj)", "# - Call get_request() with 
specified arguments logger.info(\"Get Dealers from", "= requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs, auth=HTTPBasicAuth('apikey', api_key)) else: response =", "reviews: if (dealerId == review.get(\"dealership\")): # Create a DealerReview object", "CarDealer(address=dealer[\"doc\"][\"address\"], city=dealer[\"doc\"][\"city\"], full_name=dealer[\"doc\"][\"full_name\"], # id=dealer[\"doc\"][\"id\"], lat=dealer[\"doc\"][\"lat\"], long=dealer[\"doc\"][\"long\"], # short_name=dealer[\"doc\"][\"short_name\"], #", "else: result = \"Unknown error\" return info, result def get_dealer_by_id(url,", "None, state=state) if status_code == 200 and json_result: # Get", "row list in JSON as reviews reviews = json_result[\"body\"][\"data\"] #", "object with values in object #sentiment = analyze_review_sentiments(review[\"review\"]) review_obj =", "object #sentiment = analyze_review_sentiments(review[\"review\"]) review_obj = DealerReview( id=review.get(\"id\"), name=review.get(\"name\"), review=review.get(\"review\"),", "zip=dealer[\"doc\"][\"zip\"]) info.append(dealer_obj) elif json_result: result = json_result[\"message\"] else: result =", "elif json_result: result = json_result[\"message\"] else: result = \"Unknown error\"", "None, dealerId=dealerId) # json_result, status_code = get_request(url, None, dealerId=dealerId) if", "Watson NLU and analyze text # def analyze_review_sentiments(text): # -", "import json # import related models here from .models import", "status_code # Create a get_dealers_from_cf method to get dealers from", "= \"Unknown error\" return info, result def get_dealer_by_id(url, dealerId): #", "post_request(url, json_payload, **kwargs): print(\"Post to url: {} \".format(url)) print(kwargs) print(json_payload)", "None), car_year=review.get(\"car_year\", None), purchase_date=review.get(\"purchase_date\", None)) info.append(review_obj) elif json_result: result =", "Create a `get_request` to make HTTP GET requests # e.g.,", 
"**kwargs): info = [] result = \"ok\" # - Call", "state=dealer[\"state\"], # st=dealer[\"st\"], zip=dealer[\"zip\"]) elif json_result: result = json_result[\"message\"] else:", "get_request(url, api_key, **kwargs): print(\"GET from {}\".format(url)) print(kwargs) try: if api_key", "For each dealer object for dealer in dealers: # dlr_data", "== review.get(\"dealership\")): # Create a DealerReview object with values in", "get_dealer_by_id(url, dealerId): # Call get_request with a URL parameter info", "None: response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs, auth=HTTPBasicAuth('apikey', api_key)) else:", "info = [] result = \"ok\" # - Call get_request()", "headers={'Content-Type': 'application/json'}, params=kwargs, json=json_payload) status_code = response.status_code print(\"With status code", "api_key is not None: response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs,", "dealer_obj = CarDealer(address=dealer[\"doc\"][\"address\"], city=dealer[\"doc\"][\"city\"], full_name=dealer[\"doc\"][\"full_name\"], # id=dealer[\"doc\"][\"id\"], lat=dealer[\"doc\"][\"lat\"], long=dealer[\"doc\"][\"long\"], #", "params=params, headers={'Content-Type': 'application/json'}, # auth=HTTPBasicAuth('apikey', api_key)) def get_request(url, api_key, **kwargs):", "200 and json_result: dealers = json_result['rows'] logger.info(len(dealers)) for dealer in", "lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), st=dealer.get(\"st\"), state=dealer.get(\"state\"), zip=dealer.get(\"zip\")) # info = CarDealer(address=dealer[\"address\"],", "logger.info(\"Get Dealers from CF Called!\") json_result, status_code = get_request(url, None)", "with values in `doc` object dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"),", "a `post_request` to make HTTP POST requests # e.g., response", "status_code = get_request(url, 
None) if status_code == 200 and json_result:", "from {}\".format(url)) print(kwargs) try: if api_key is not None: response", "CarDealer(address=dealer[\"address\"], city=dealer[\"city\"], full_name=dealer[\"full_name\"], # id=dealer[\"id\"], lat=dealer[\"lat\"], long=dealer[\"long\"], # short_name=dealer[\"short_name\"], state=dealer[\"state\"],", "dealer in dealers: # Create a CarDealer object with values", "if dlr_data.get('address'): # Create a CarDealer object with values in", "dealer[\"doc\"] # Create a CarDealer object with values in `doc`", "= CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), state=dealer.get(\"state\"), st=dealer.get(\"st\"),", "method to call Watson NLU and analyze text # def", "list in JSON as dealers dealers = json_result[\"rows\"] # For", "state=state) if status_code == 200 and json_result: # Get the", "dealers = json_result[\"rows\"] for dealer in dealers: # Create a", "None, dealerId=dealerId) if status_code == 200 and json_result: # Get", "id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), st=dealer.get(\"st\"), state=dealer.get(\"state\"), zip=dealer.get(\"zip\")) # info =", "result def get_dealer_reviews_from_cf (url, dealerId): info = [] result =", "CarDealer object with values in `doc` object dealer_obj = CarDealer(address=dealer.get(\"address\"),", "status_code # Create a `post_request` to make HTTP POST requests", "CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\"))", "id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), 
long=dlr_data.get(\"long\"), # short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), # st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) info.append(dealer_obj)", "id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), state=dealer.get(\"state\"), st=dealer.get(\"st\"), zip=dealer.get(\"zip\")) # dealer_obj =", "= DealerReview( id=review.get(\"id\"), name=review.get(\"name\"), review=review.get(\"review\"), purchase=review.get(\"purchase\"), car_make=review.get(\"car_make\", None), car_model=review.get(\"car_model\", None),", "# dlr_data = dealer[\"doc\"] # Create a CarDealer object with", "if status_code == 200 and json_result: # Get the row", "get_dealers_from_cf(url, **kwargs): info = [] result = \"ok\" # -", "city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), state=dealer.get(\"state\"), st=dealer.get(\"st\"), zip=dealer.get(\"zip\")) #", "= get_request(url, None, dealerId=dealerId) if status_code == 200 and json_result:", "\"Unknown error\" return info, result # Create an `analyze_review_sentiments` method", "st=dealer[\"st\"], zip=dealer[\"zip\"]) elif json_result: result = json_result[\"message\"] else: result =", "= [] result = \"ok\" # - Call get_request() with", "dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"),", "call Watson NLU and analyze text # def analyze_review_sentiments(text): #", "function def get_dealers_from_cf(url, **kwargs): info = [] result = \"ok\"", "HTTP GET requests # e.g., response = requests.get(url, params=params, headers={'Content-Type':", "= CarDealer(address=dealer.get(\"address\"), 
city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), st=dealer.get(\"st\"), state=dealer.get(\"state\"),", "json_result[\"message\"] else: result = \"Unknown error\" return info, result def", "result = \"Unknown error\" return info, result # Create an", "json_result, status_code = get_request(url, None, dealerId=dealerId) # json_result, status_code =", "get_dealer_reviews_from_cf (url, dealerId): info = [] result = \"ok\" #", "an `analyze_review_sentiments` method to call Watson NLU and analyze text", "# st=dealer[\"doc\"][\"st\"], state=dealer[\"doc\"][\"state\"], zip=dealer[\"doc\"][\"zip\"]) info.append(dealer_obj) elif json_result: result = json_result[\"message\"]", "result = \"Unknown error\" return info, result def get_dealer_reviews_from_cf (url,", "each dealer object for dealer in dealers: # dlr_data =", "logging.getLogger(__name__) # Create a `get_request` to make HTTP GET requests", "status_code = response.status_code print(\"With status code {}\".format(status_code)) json_data = json.loads(response.text)", "specified arguments # - Get the returned sentiment label such", "json_result[\"rows\"] # For each dealer object for dealer in dealers:", "= json_result[\"body\"][\"data\"] # For each review object for review in", "car_model=review.get(\"car_model\", None), car_year=review.get(\"car_year\", None), purchase_date=review.get(\"purchase_date\", None)) info.append(review_obj) elif json_result: result", "- Get the returned sentiment label such as Positive or", "= CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), # id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), # short_name=dlr_data.get(\"short_name\"),", "info = None result = \"ok\" json_result, status_code = get_request(url,", "= requests.post(url, params=kwargs, json=payload) def 
post_request(url, json_payload, **kwargs): print(\"Post to", "get_request(url, None) if status_code == 200 and json_result: dealers =", "json_payload, **kwargs): print(\"Post to url: {} \".format(url)) print(kwargs) print(json_payload) response", "requests.get(url, params=params, headers={'Content-Type': 'application/json'}, # auth=HTTPBasicAuth('apikey', api_key)) def get_request(url, api_key,", "def post_request(url, json_payload, **kwargs): print(\"Post to url: {} \".format(url)) print(kwargs)", "# info = CarDealer(address=dealer[\"address\"], city=dealer[\"city\"], full_name=dealer[\"full_name\"], # id=dealer[\"id\"], lat=dealer[\"lat\"], long=dealer[\"long\"],", "status_code = get_request(url, None, dealerId=dealerId) if status_code == 200 and", "in object #sentiment = analyze_review_sentiments(review[\"review\"]) review_obj = DealerReview( id=review.get(\"id\"), name=review.get(\"name\"),", "CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), st=dealer.get(\"st\"), state=dealer.get(\"state\"), zip=dealer.get(\"zip\"))", "get_request(url, None, dealerId=dealerId) if status_code == 200 and json_result: #", "headers={'Content-Type': 'application/json'}, params=kwargs, auth=HTTPBasicAuth('apikey', api_key)) else: response = requests.get(url, headers={'Content-Type':", "short_name=dealer.get(\"short_name\"), state=dealer.get(\"state\"), st=dealer.get(\"st\"), zip=dealer.get(\"zip\")) # dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"),", "id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) # dealer_obj =", "lat=dealer[\"doc\"][\"lat\"], long=dealer[\"doc\"][\"long\"], # 
short_name=dealer[\"doc\"][\"short_name\"], # st=dealer[\"doc\"][\"st\"], state=dealer[\"doc\"][\"state\"], zip=dealer[\"doc\"][\"zip\"]) info.append(dealer_obj) elif", "review object for review in reviews: if (dealerId == review.get(\"dealership\")):", "error\" return info, result # Create an `analyze_review_sentiments` method to", "# short_name=dealer[\"short_name\"], state=dealer[\"state\"], # st=dealer[\"st\"], zip=dealer[\"zip\"]) elif json_result: result =", "params=kwargs, json=payload) def post_request(url, json_payload, **kwargs): print(\"Post to url: {}", "in reviews: if (dealerId == review.get(\"dealership\")): # Create a DealerReview", "= json.loads(response.text) return json_data, status_code # Create a get_dealers_from_cf method", "CF Called!\") json_result, status_code = get_request(url, None) if status_code ==", "a CarDealer object with values in `doc` object info =", "# Call get_request with a URL parameter json_result, status_code =", "status_code = get_request(url, None, dealerId=dealerId) # json_result, status_code = get_request(url,", "# - Call get_request() with specified arguments # - Get", "in dealers: dlr_data = dealer['doc'] #print('ADDRESS', dlr_data[\"address\"]) if dlr_data.get('address'): #", "json_result[\"rows\"] for dealer in dealers: # Create a CarDealer object", "= \"Unknown error\" return info, result # Create an `analyze_review_sentiments`", "JSON as dealers dealers = json_result[\"rows\"] # For each dealer", "city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), st=dealer.get(\"st\"), state=dealer.get(\"state\"), zip=dealer.get(\"zip\")) #", "info.append(dealer_obj) elif json_result: result = json_result[\"message\"] else: result = \"Unknown", "Call get_request() with specified arguments # - Get the returned", "short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), st=dlr_data.get(\"st\"), 
zip=dlr_data.get(\"zip\")) # dealer_obj = CarDealer(address=dealer[\"doc\"][\"address\"], city=dealer[\"doc\"][\"city\"], full_name=dealer[\"doc\"][\"full_name\"],", "# Get the row list in JSON as reviews reviews", "json_result: dealers = json_result['rows'] logger.info(len(dealers)) for dealer in dealers: dlr_data", "get_dealers_from_cf method to get dealers from a cloud function def", "# import related models here from .models import CarDealer, DealerReview", "st=dealer.get(\"st\"), state=dealer.get(\"state\"), zip=dealer.get(\"zip\")) # info = CarDealer(address=dealer[\"address\"], city=dealer[\"city\"], full_name=dealer[\"full_name\"], #", "params=kwargs) except: print(\"Network Error\") status_code = response.status_code print(\"With status code", "get_request() with specified arguments # - Get the returned sentiment", "in `doc` object dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"),", "# id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), # short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), # st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\"))", "Get the row list in JSON as reviews reviews =", "make HTTP POST requests # e.g., response = requests.post(url, params=kwargs,", "json_result[\"message\"] else: result = \"Unknown error\" return info, result #", "a CarDealer object with values in `doc` object dealer_obj =", "# - Get the returned sentiment label such as Positive", "= get_request(url, None, state=state) if status_code == 200 and json_result:", "reviews = json_result[\"body\"][\"data\"] # For each review object for review", "status code {}\".format(status_code)) json_data = json.loads(response.text) return json_data, status_code #", "requests.post(url, headers={'Content-Type': 'application/json'}, params=kwargs, json=json_payload) status_code = response.status_code print(\"With 
status", "# Create a CarDealer object with values in `doc` object", "info, result def get_dealers_by_state (url, state): info = [] result", "Error\") status_code = response.status_code print(\"With status code {}\".format(status_code)) json_data =", "response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs) except: print(\"Network Error\") status_code", "{} \".format(url)) print(kwargs) print(json_payload) response = requests.post(url, headers={'Content-Type': 'application/json'}, params=kwargs,", "= json_result[\"rows\"] for dealer in dealers: # Create a CarDealer", "to url: {} \".format(url)) print(kwargs) print(json_payload) response = requests.post(url, headers={'Content-Type':", "json.loads(response.text) return json_data, status_code # Create a `post_request` to make", "purchase=review.get(\"purchase\"), car_make=review.get(\"car_make\", None), car_model=review.get(\"car_model\", None), car_year=review.get(\"car_year\", None), purchase_date=review.get(\"purchase_date\", None)) info.append(review_obj)", "= json.loads(response.text) return json_data, status_code # Create a `post_request` to", "'application/json'}, params=kwargs) except: print(\"Network Error\") status_code = response.status_code print(\"With status", "URL parameter json_result, status_code = get_request(url, None, dealerId=dealerId) if status_code", "get_request with a URL parameter json_result, status_code = get_request(url, None,", "with a URL parameter json_result, status_code = get_request(url, None, dealerId=dealerId)", "with a URL parameter info = None result = \"ok\"", "st=dealer.get(\"st\"), zip=dealer.get(\"zip\")) # dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), # id=dlr_data.get(\"id\"),", "dealer['doc'] #print('ADDRESS', dlr_data[\"address\"]) if dlr_data.get('address'): # Create a CarDealer object", "is not None: response = requests.get(url, headers={'Content-Type': 
'application/json'}, params=kwargs, auth=HTTPBasicAuth('apikey',", "with specified arguments logger.info(\"Get Dealers from CF Called!\") json_result, status_code", "values in `doc` object dealer_obj = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"),", "def get_dealer_reviews_from_cf (url, dealerId): info = [] result = \"ok\"", "except: print(\"Network Error\") status_code = response.status_code print(\"With status code {}\".format(status_code))", "with values in `doc` object info = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"),", "long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), state=dealer.get(\"state\"), st=dealer.get(\"st\"), zip=dealer.get(\"zip\")) # dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"),", "<reponame>christiansencq/ibm_capstone import requests import json # import related models here", "with specified arguments # - Get the returned sentiment label", "json_data, status_code # Create a `post_request` to make HTTP POST", "result = \"ok\" # Call get_request with a URL parameter", "json_result, status_code = get_request(url, None) if status_code == 200 and", "from requests.auth import HTTPBasicAuth import logging logger = logging.getLogger(__name__) #", "auth=HTTPBasicAuth('apikey', api_key)) def get_request(url, api_key, **kwargs): print(\"GET from {}\".format(url)) print(kwargs)", "lat=dealer[\"lat\"], long=dealer[\"long\"], # short_name=dealer[\"short_name\"], state=dealer[\"state\"], # st=dealer[\"st\"], zip=dealer[\"zip\"]) elif json_result:", "reviews reviews = json_result[\"body\"][\"data\"] # For each review object for", "if (dealerId == review.get(\"dealership\")): # Create a DealerReview object with", "# Call get_request with a URL parameter info = None", "logging logger = logging.getLogger(__name__) # Create a `get_request` to make", "import 
requests import json # import related models here from", "full_name=dlr_data.get(\"full_name\"), # id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), # short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), # st=dlr_data.get(\"st\"),", "name=review.get(\"name\"), review=review.get(\"review\"), purchase=review.get(\"purchase\"), car_make=review.get(\"car_make\", None), car_model=review.get(\"car_model\", None), car_year=review.get(\"car_year\", None), purchase_date=review.get(\"purchase_date\",", "object dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), short_name=dlr_data.get(\"short_name\"),", "# short_name=dealer[\"doc\"][\"short_name\"], # st=dealer[\"doc\"][\"st\"], state=dealer[\"doc\"][\"state\"], zip=dealer[\"doc\"][\"zip\"]) info.append(dealer_obj) elif json_result: result", "status_code == 200 and json_result: # Get the row list", "info = [] result = \"ok\" # Call get_request with", "short_name=dealer[\"short_name\"], state=dealer[\"state\"], # st=dealer[\"st\"], zip=dealer[\"zip\"]) elif json_result: result = json_result[\"message\"]", "= requests.get(url, params=params, headers={'Content-Type': 'application/json'}, # auth=HTTPBasicAuth('apikey', api_key)) def get_request(url,", "dealers = json_result['rows'] logger.info(len(dealers)) for dealer in dealers: dlr_data =", "`doc` object dealer_obj = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"),", "as dealers dealers = json_result[\"rows\"] # For each dealer object", "= json_result[\"message\"] else: result = \"Unknown error\" return info, result", "Create a `post_request` to make HTTP POST requests # e.g.,", "def analyze_review_sentiments(text): # - Call get_request() with 
specified arguments #", "json_result: # Get the row list in JSON as dealers", "requests.auth import HTTPBasicAuth import logging logger = logging.getLogger(__name__) # Create", "= [] result = \"ok\" # Call get_request with a", "response = requests.post(url, headers={'Content-Type': 'application/json'}, params=kwargs, json=json_payload) status_code = response.status_code", "the row list in JSON as dealers dealers = json_result[\"rows\"]", "car_make=review.get(\"car_make\", None), car_model=review.get(\"car_model\", None), car_year=review.get(\"car_year\", None), purchase_date=review.get(\"purchase_date\", None)) info.append(review_obj) elif", "long=dealer[\"doc\"][\"long\"], # short_name=dealer[\"doc\"][\"short_name\"], # st=dealer[\"doc\"][\"st\"], state=dealer[\"doc\"][\"state\"], zip=dealer[\"doc\"][\"zip\"]) info.append(dealer_obj) elif json_result:", "error\" return info, result def get_dealers_by_state (url, state): info =", "api_key, **kwargs): print(\"GET from {}\".format(url)) print(kwargs) try: if api_key is", "**kwargs): print(\"GET from {}\".format(url)) print(kwargs) try: if api_key is not", "dlr_data.get('address'): # Create a CarDealer object with values in `doc`", "parameter json_result, status_code = get_request(url, None, state=state) if status_code ==", "= analyze_review_sentiments(review[\"review\"]) review_obj = DealerReview( id=review.get(\"id\"), name=review.get(\"name\"), review=review.get(\"review\"), purchase=review.get(\"purchase\"), car_make=review.get(\"car_make\",", "print(kwargs) print(json_payload) response = requests.post(url, headers={'Content-Type': 'application/json'}, params=kwargs, json=json_payload) status_code", "json_result, status_code = get_request(url, None, state=state) if status_code == 200", "st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) # dealer_obj = CarDealer(address=dealer[\"doc\"][\"address\"], city=dealer[\"doc\"][\"city\"], full_name=dealer[\"doc\"][\"full_name\"], # id=dealer[\"doc\"][\"id\"],", "# def 
analyze_review_sentiments(text): # - Call get_request() with specified arguments", "= json_result['rows'] logger.info(len(dealers)) for dealer in dealers: dlr_data = dealer['doc']", "= CarDealer(address=dealer[\"address\"], city=dealer[\"city\"], full_name=dealer[\"full_name\"], # id=dealer[\"id\"], lat=dealer[\"lat\"], long=dealer[\"long\"], # short_name=dealer[\"short_name\"],", "response = requests.post(url, params=kwargs, json=payload) def post_request(url, json_payload, **kwargs): print(\"Post", "for dealer in dealers: # dlr_data = dealer[\"doc\"] # Create", "# auth=HTTPBasicAuth('apikey', api_key)) def get_request(url, api_key, **kwargs): print(\"GET from {}\".format(url))", "print(kwargs) try: if api_key is not None: response = requests.get(url,", "dealers: # Create a CarDealer object with values in `doc`", "# st=dealer[\"st\"], zip=dealer[\"zip\"]) elif json_result: result = json_result[\"message\"] else: result", "get_dealers_by_state (url, state): info = [] result = \"ok\" #", "get_request() with specified arguments logger.info(\"Get Dealers from CF Called!\") json_result,", "related models here from .models import CarDealer, DealerReview from requests.auth", "if api_key is not None: response = requests.get(url, headers={'Content-Type': 'application/json'},", "code {}\".format(status_code)) json_data = json.loads(response.text) return json_data, status_code # Create", "= dealer[\"doc\"] # Create a CarDealer object with values in", "state): info = [] result = \"ok\" # Call get_request", "Call get_request with a URL parameter info = None result", "with a URL parameter json_result, status_code = get_request(url, None, state=state)", "dealerId=dealerId) if status_code == 200 and json_result: # Get the", "the row list in JSON as reviews reviews = json_result[\"body\"][\"data\"]", "requests.post(url, params=kwargs, json=payload) def post_request(url, json_payload, **kwargs): print(\"Post to url:", "== 200 and json_result: dealers = json_result['rows'] 
logger.info(len(dealers)) for dealer", "review in reviews: if (dealerId == review.get(\"dealership\")): # Create a", "json_data = json.loads(response.text) return json_data, status_code # Create a `post_request`", "long=dlr_data.get(\"long\"), short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) # dealer_obj = CarDealer(address=dealer[\"doc\"][\"address\"], city=dealer[\"doc\"][\"city\"],", "from .models import CarDealer, DealerReview from requests.auth import HTTPBasicAuth import", "= \"ok\" json_result, status_code = get_request(url, None, dealerId=dealerId) # json_result,", "{}\".format(url)) print(kwargs) try: if api_key is not None: response =", "Create a CarDealer object with values in `doc` object dealer_obj", "values in object #sentiment = analyze_review_sentiments(review[\"review\"]) review_obj = DealerReview( id=review.get(\"id\"),", "and analyze text # def analyze_review_sentiments(text): # - Call get_request()", "= dealer['doc'] #print('ADDRESS', dlr_data[\"address\"]) if dlr_data.get('address'): # Create a CarDealer", "Create an `analyze_review_sentiments` method to call Watson NLU and analyze", "long=dlr_data.get(\"long\"), # short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), # st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) info.append(dealer_obj) elif json_result:", "For each review object for review in reviews: if (dealerId", "specified arguments logger.info(\"Get Dealers from CF Called!\") json_result, status_code =", "if status_code == 200 and json_result: dealers = json_result['rows'] logger.info(len(dealers))", "None) if status_code == 200 and json_result: dealers = json_result['rows']", "= logging.getLogger(__name__) # Create a `get_request` to make HTTP GET", "state=dealer.get(\"state\"), st=dealer.get(\"st\"), zip=dealer.get(\"zip\")) # dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), 
full_name=dlr_data.get(\"full_name\"), #", "short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), # st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) info.append(dealer_obj) elif json_result: result =", "json_result, status_code = get_request(url, None, dealerId=dealerId) if status_code == 200", "\"ok\" # Call get_request with a URL parameter json_result, status_code", "auth=HTTPBasicAuth('apikey', api_key)) else: response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs) except:", "id=review.get(\"id\"), name=review.get(\"name\"), review=review.get(\"review\"), purchase=review.get(\"purchase\"), car_make=review.get(\"car_make\", None), car_model=review.get(\"car_model\", None), car_year=review.get(\"car_year\", None),", "dealers dealers = json_result[\"rows\"] # For each dealer object for", "json # import related models here from .models import CarDealer,", "review.get(\"dealership\")): # Create a DealerReview object with values in object", "'application/json'}, params=kwargs, auth=HTTPBasicAuth('apikey', api_key)) else: response = requests.get(url, headers={'Content-Type': 'application/json'},", "`post_request` to make HTTP POST requests # e.g., response =", "= \"Unknown error\" return info, result def get_dealers_by_state (url, state):", "info, result def get_dealer_by_id(url, dealerId): # Call get_request with a", "= response.status_code print(\"With status code {}\".format(status_code)) json_data = json.loads(response.text) return", "dealers: # dlr_data = dealer[\"doc\"] # Create a CarDealer object", "st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) info.append(dealer_obj) elif json_result: result = json_result[\"message\"] else: result", "result = \"Unknown error\" return info, result def get_dealers_by_state (url,", "# e.g., response = requests.post(url, params=kwargs, json=payload) def post_request(url, json_payload,", "object dealer_obj = CarDealer(address=dealer.get(\"address\"), 
city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"),", "zip=dealer.get(\"zip\")) # info = CarDealer(address=dealer[\"address\"], city=dealer[\"city\"], full_name=dealer[\"full_name\"], # id=dealer[\"id\"], lat=dealer[\"lat\"],", "object info = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"),", "DealerReview from requests.auth import HTTPBasicAuth import logging logger = logging.getLogger(__name__)", "short_name=dealer[\"doc\"][\"short_name\"], # st=dealer[\"doc\"][\"st\"], state=dealer[\"doc\"][\"state\"], zip=dealer[\"doc\"][\"zip\"]) info.append(dealer_obj) elif json_result: result =", "dealer object for dealer in dealers: # dlr_data = dealer[\"doc\"]", "Create a get_dealers_from_cf method to get dealers from a cloud", "json.loads(response.text) return json_data, status_code # Create a get_dealers_from_cf method to", "in `doc` object info = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"),", "row list in JSON as dealers dealers = json_result[\"rows\"] for", "and json_result: dealers = json_result['rows'] logger.info(len(dealers)) for dealer in dealers:", "# Create a `get_request` to make HTTP GET requests #", "requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs, auth=HTTPBasicAuth('apikey', api_key)) else: response = requests.get(url,", "Create a DealerReview object with values in object #sentiment =", "'application/json'}, params=kwargs, json=json_payload) status_code = response.status_code print(\"With status code {}\".format(status_code))", "json_result[\"body\"][\"data\"] # For each review object for review in reviews:", "models here from 
.models import CarDealer, DealerReview from requests.auth import", "dealers dealers = json_result[\"rows\"] for dealer in dealers: # Create", "list in JSON as reviews reviews = json_result[\"body\"][\"data\"] # For", "in dealers: # Create a CarDealer object with values in", "def get_request(url, api_key, **kwargs): print(\"GET from {}\".format(url)) print(kwargs) try: if", "a cloud function def get_dealers_from_cf(url, **kwargs): info = [] result", "dealerId): # Call get_request with a URL parameter info =", "method to get dealers from a cloud function def get_dealers_from_cf(url,", "dealer_obj = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), state=dealer.get(\"state\"),", "object for review in reviews: if (dealerId == review.get(\"dealership\")): #", "# dealer_obj = CarDealer(address=dealer[\"doc\"][\"address\"], city=dealer[\"doc\"][\"city\"], full_name=dealer[\"doc\"][\"full_name\"], # id=dealer[\"doc\"][\"id\"], lat=dealer[\"doc\"][\"lat\"], long=dealer[\"doc\"][\"long\"],", "full_name=dealer[\"full_name\"], # id=dealer[\"id\"], lat=dealer[\"lat\"], long=dealer[\"long\"], # short_name=dealer[\"short_name\"], state=dealer[\"state\"], # st=dealer[\"st\"],", "parameter info = None result = \"ok\" json_result, status_code =", "# short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), # st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) info.append(dealer_obj) elif json_result: result", "= \"ok\" # - Call get_request() with specified arguments logger.info(\"Get", "= CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), st=dlr_data.get(\"st\"),", "a URL parameter 
json_result, status_code = get_request(url, None, state=state) if", "headers={'Content-Type': 'application/json'}, # auth=HTTPBasicAuth('apikey', api_key)) def get_request(url, api_key, **kwargs): print(\"GET", "values in `doc` object info = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"),", "in JSON as reviews reviews = json_result[\"body\"][\"data\"] # For each", "e.g., response = requests.get(url, params=params, headers={'Content-Type': 'application/json'}, # auth=HTTPBasicAuth('apikey', api_key))", "as dealers dealers = json_result[\"rows\"] for dealer in dealers: #", "json_result['rows'] logger.info(len(dealers)) for dealer in dealers: dlr_data = dealer['doc'] #print('ADDRESS',", "params=kwargs, auth=HTTPBasicAuth('apikey', api_key)) else: response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs)", "(url, state): info = [] result = \"ok\" # Call", "cloud function def get_dealers_from_cf(url, **kwargs): info = [] result =", "json_data = json.loads(response.text) return json_data, status_code # Create a get_dealers_from_cf", "full_name=dlr_data.get(\"full_name\"), id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) # dealer_obj", "a DealerReview object with values in object #sentiment = analyze_review_sentiments(review[\"review\"])", "object with values in `doc` object dealer_obj = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"),", "api_key)) def get_request(url, api_key, **kwargs): print(\"GET from {}\".format(url)) print(kwargs) try:", "# Create a DealerReview object with values in object #sentiment", "text # def analyze_review_sentiments(text): # - Call get_request() with specified", "requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs) except: 
print(\"Network Error\") status_code = response.status_code", "= \"ok\" # Call get_request with a URL parameter json_result,", "== 200 and json_result: # Get the row list in", "arguments logger.info(\"Get Dealers from CF Called!\") json_result, status_code = get_request(url,", "st=dealer[\"doc\"][\"st\"], state=dealer[\"doc\"][\"state\"], zip=dealer[\"doc\"][\"zip\"]) info.append(dealer_obj) elif json_result: result = json_result[\"message\"] else:", "= CarDealer(address=dealer[\"doc\"][\"address\"], city=dealer[\"doc\"][\"city\"], full_name=dealer[\"doc\"][\"full_name\"], # id=dealer[\"doc\"][\"id\"], lat=dealer[\"doc\"][\"lat\"], long=dealer[\"doc\"][\"long\"], # short_name=dealer[\"doc\"][\"short_name\"],", "in JSON as dealers dealers = json_result[\"rows\"] for dealer in", "zip=dealer[\"zip\"]) elif json_result: result = json_result[\"message\"] else: result = \"Unknown", "None), car_model=review.get(\"car_model\", None), car_year=review.get(\"car_year\", None), purchase_date=review.get(\"purchase_date\", None)) info.append(review_obj) elif json_result:", "200 and json_result: # Get the row list in JSON", "result = \"ok\" # - Call get_request() with specified arguments", "# st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) info.append(dealer_obj) elif json_result: result = json_result[\"message\"] else:", "for dealer in dealers: # Create a CarDealer object with", "= json_result[\"rows\"] # For each dealer object for dealer in", "a URL parameter info = None result = \"ok\" json_result,", "requests import json # import related models here from .models", "return info, result def get_dealer_by_id(url, dealerId): # Call get_request with", "info.append(review_obj) elif json_result: result = json_result[\"message\"] else: result = \"Unknown", "Call get_request with a URL parameter json_result, status_code = get_request(url,", "Dealers from CF Called!\") json_result, status_code = get_request(url, None) if", "dlr_data = dealer['doc'] #print('ADDRESS', 
dlr_data[\"address\"]) if dlr_data.get('address'): # Create a", "get_request(url, None, dealerId=dealerId) # json_result, status_code = get_request(url, None, dealerId=dealerId)", "Create a CarDealer object with values in `doc` object info", "# Create a get_dealers_from_cf method to get dealers from a", "error\" return info, result def get_dealer_by_id(url, dealerId): # Call get_request", "# json_result, status_code = get_request(url, None, dealerId=dealerId) if status_code ==", "info, result def get_dealer_reviews_from_cf (url, dealerId): info = [] result", "state=dlr_data.get(\"state\"), st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) # dealer_obj = CarDealer(address=dealer[\"doc\"][\"address\"], city=dealer[\"doc\"][\"city\"], full_name=dealer[\"doc\"][\"full_name\"], #", "as reviews reviews = json_result[\"body\"][\"data\"] # For each review object", "def get_dealer_by_id(url, dealerId): # Call get_request with a URL parameter", "analyze_review_sentiments(review[\"review\"]) review_obj = DealerReview( id=review.get(\"id\"), name=review.get(\"name\"), review=review.get(\"review\"), purchase=review.get(\"purchase\"), car_make=review.get(\"car_make\", None),", "a get_dealers_from_cf method to get dealers from a cloud function", "info = CarDealer(address=dealer[\"address\"], city=dealer[\"city\"], full_name=dealer[\"full_name\"], # id=dealer[\"id\"], lat=dealer[\"lat\"], long=dealer[\"long\"], #", "zip=dlr_data.get(\"zip\")) info.append(dealer_obj) elif json_result: result = json_result[\"message\"] else: result =", "return info, result # Create an `analyze_review_sentiments` method to call", "result = \"ok\" json_result, status_code = get_request(url, None, dealerId=dealerId) #", "`analyze_review_sentiments` method to call Watson NLU and analyze text #", "headers={'Content-Type': 'application/json'}, params=kwargs) except: print(\"Network Error\") status_code = response.status_code print(\"With", "full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), 
lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), st=dealer.get(\"st\"), state=dealer.get(\"state\"), zip=dealer.get(\"zip\")) # info", "and json_result: # Get the row list in JSON as", "to call Watson NLU and analyze text # def analyze_review_sentiments(text):", "dealer in dealers: dlr_data = dealer['doc'] #print('ADDRESS', dlr_data[\"address\"]) if dlr_data.get('address'):", "= get_request(url, None, dealerId=dealerId) # json_result, status_code = get_request(url, None,", "in JSON as dealers dealers = json_result[\"rows\"] # For each", "CarDealer object with values in `doc` object dealer_obj = CarDealer(address=dlr_data.get(\"address\"),", "result = \"Unknown error\" return info, result def get_dealer_by_id(url, dealerId):", "city=dealer[\"city\"], full_name=dealer[\"full_name\"], # id=dealer[\"id\"], lat=dealer[\"lat\"], long=dealer[\"long\"], # short_name=dealer[\"short_name\"], state=dealer[\"state\"], #", "\"Unknown error\" return info, result def get_dealers_by_state (url, state): info", "# id=dealer[\"id\"], lat=dealer[\"lat\"], long=dealer[\"long\"], # short_name=dealer[\"short_name\"], state=dealer[\"state\"], # st=dealer[\"st\"], zip=dealer[\"zip\"])", "GET requests # e.g., response = requests.get(url, params=params, headers={'Content-Type': 'application/json'},", "for review in reviews: if (dealerId == review.get(\"dealership\")): # Create", "object with values in `doc` object info = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"),", "make HTTP GET requests # e.g., response = requests.get(url, params=params,", "# id=dealer[\"doc\"][\"id\"], lat=dealer[\"doc\"][\"lat\"], long=dealer[\"doc\"][\"long\"], # short_name=dealer[\"doc\"][\"short_name\"], # st=dealer[\"doc\"][\"st\"], state=dealer[\"doc\"][\"state\"], zip=dealer[\"doc\"][\"zip\"])", "requests # e.g., response = requests.post(url, params=kwargs, json=payload) def post_request(url,", "values in `doc` object dealer_obj = 
CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), id=dlr_data.get(\"id\"),", "\".format(url)) print(kwargs) print(json_payload) response = requests.post(url, headers={'Content-Type': 'application/json'}, params=kwargs, json=json_payload)", "analyze text # def analyze_review_sentiments(text): # - Call get_request() with", "info = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), st=dealer.get(\"st\"),", "return info, result def get_dealers_by_state (url, state): info = []", "lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), st=dlr_data.get(\"st\"), zip=dlr_data.get(\"zip\")) # dealer_obj = CarDealer(address=dealer[\"doc\"][\"address\"],", "\"Unknown error\" return info, result def get_dealer_by_id(url, dealerId): # Call", "print(\"Network Error\") status_code = response.status_code print(\"With status code {}\".format(status_code)) json_data", "a `get_request` to make HTTP GET requests # e.g., response", "'application/json'}, # auth=HTTPBasicAuth('apikey', api_key)) def get_request(url, api_key, **kwargs): print(\"GET from", "here from .models import CarDealer, DealerReview from requests.auth import HTTPBasicAuth", "`doc` object dealer_obj = CarDealer(address=dlr_data.get(\"address\"), city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"),", "HTTPBasicAuth import logging logger = logging.getLogger(__name__) # Create a `get_request`", "for dealer in dealers: dlr_data = dealer['doc'] #print('ADDRESS', dlr_data[\"address\"]) if", "e.g., response = requests.post(url, params=kwargs, json=payload) def post_request(url, json_payload, **kwargs):", "with values in `doc` object 
dealer_obj = CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"),", "status_code = get_request(url, None, state=state) if status_code == 200 and", "with values in object #sentiment = analyze_review_sentiments(review[\"review\"]) review_obj = DealerReview(", "dealer in dealers: # dlr_data = dealer[\"doc\"] # Create a", "URL parameter json_result, status_code = get_request(url, None, state=state) if status_code", "long=dealer[\"long\"], # short_name=dealer[\"short_name\"], state=dealer[\"state\"], # st=dealer[\"st\"], zip=dealer[\"zip\"]) elif json_result: result", "each review object for review in reviews: if (dealerId ==", "requests # e.g., response = requests.get(url, params=params, headers={'Content-Type': 'application/json'}, #", "else: result = \"Unknown error\" return info, result def get_dealers_by_state", "arguments # - Get the returned sentiment label such as", "# Create an `analyze_review_sentiments` method to call Watson NLU and", "url: {} \".format(url)) print(kwargs) print(json_payload) response = requests.post(url, headers={'Content-Type': 'application/json'},", "result = json_result[\"message\"] else: result = \"Unknown error\" return info,", "import HTTPBasicAuth import logging logger = logging.getLogger(__name__) # Create a", "{}\".format(status_code)) json_data = json.loads(response.text) return json_data, status_code # Create a", "else: result = \"Unknown error\" return info, result # Create", "Get the returned sentiment label such as Positive or Negative", "# For each review object for review in reviews: if", "full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), state=dealer.get(\"state\"), st=dealer.get(\"st\"), zip=dealer.get(\"zip\")) # dealer_obj", "not None: response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs, auth=HTTPBasicAuth('apikey', api_key))", "= 
get_request(url, None) if status_code == 200 and json_result: dealers", "json=json_payload) status_code = response.status_code print(\"With status code {}\".format(status_code)) json_data =", "(url, dealerId): info = [] result = \"ok\" # Call", "CarDealer object with values in `doc` object info = CarDealer(address=dealer.get(\"address\"),", "CarDealer(address=dealer.get(\"address\"), city=dealer.get(\"city\"), full_name=dealer.get(\"full_name\"), id=dealer.get(\"id\"), lat=dealer.get(\"lat\"), long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), state=dealer.get(\"state\"), st=dealer.get(\"st\"), zip=dealer.get(\"zip\"))", "response = requests.get(url, params=params, headers={'Content-Type': 'application/json'}, # auth=HTTPBasicAuth('apikey', api_key)) def", "CarDealer, DealerReview from requests.auth import HTTPBasicAuth import logging logger =", "return json_data, status_code # Create a `post_request` to make HTTP", "\"ok\" # - Call get_request() with specified arguments logger.info(\"Get Dealers", "full_name=dealer[\"doc\"][\"full_name\"], # id=dealer[\"doc\"][\"id\"], lat=dealer[\"doc\"][\"lat\"], long=dealer[\"doc\"][\"long\"], # short_name=dealer[\"doc\"][\"short_name\"], # st=dealer[\"doc\"][\"st\"], state=dealer[\"doc\"][\"state\"],", "long=dealer.get(\"long\"), short_name=dealer.get(\"short_name\"), st=dealer.get(\"st\"), state=dealer.get(\"state\"), zip=dealer.get(\"zip\")) # info = CarDealer(address=dealer[\"address\"], city=dealer[\"city\"],", "\"Unknown error\" return info, result def get_dealer_reviews_from_cf (url, dealerId): info", "DealerReview( id=review.get(\"id\"), name=review.get(\"name\"), review=review.get(\"review\"), purchase=review.get(\"purchase\"), car_make=review.get(\"car_make\", None), car_model=review.get(\"car_model\", None), car_year=review.get(\"car_year\",", "city=dlr_data.get(\"city\"), full_name=dlr_data.get(\"full_name\"), # id=dlr_data.get(\"id\"), lat=dlr_data.get(\"lat\"), long=dlr_data.get(\"long\"), # 
short_name=dlr_data.get(\"short_name\"), state=dlr_data.get(\"state\"), #", "dealers: dlr_data = dealer['doc'] #print('ADDRESS', dlr_data[\"address\"]) if dlr_data.get('address'): # Create", "from CF Called!\") json_result, status_code = get_request(url, None) if status_code" ]
[ "\"Data for one hour\", \"startTime\": \"2018-12-21T08:00:00+01:00\", \"endTime\": \"2018-12-21T09:00:00+01:00\", } ######", "is None: upload.createEdition(datasetVersionEditionData) log.info(f\"Dataset: {upload.datasetId}\") log.info(f\"Version: {upload.datasetVersion}\") log.info(f\"Edition: {upload.datasetVersionEdition}\") if", "log = logging.getLogger() config = ConfigParser() config.read(\"config.ini\") ##### # Datasets", "\"phone\": \"12345678\", }, \"publisher\": \"Tim\", } datasetVersionData = {\"version\": \"6\",", "\"2019-05-28T15:37:00+02:00\", \"description\": \"Data for one hour\", \"startTime\": \"2018-12-21T08:00:00+01:00\", \"endTime\": \"2018-12-21T09:00:00+01:00\",", "else: log.error(\"Could not upload file....\") except Exception as e: log.exception(f\">>", "##### # Datasets to be added to metadata API datasetData", "try: log.info(\"Uploading a file to S3\") upload.login() if datasetId is", "config.ini this script will # not run the relevant DataUploader", "= logging.getLogger() config = ConfigParser() config.read(\"config.ini\") ##### # Datasets to", "if these are set in config.ini this script will #", "log.exception(f\">> Something went horrible wrong:\\n{e}\") # To upload with curl:", "logging from configparser import ConfigParser from sdk.data_uploader import DataUploader logging.basicConfig(level=logging.INFO)", "the relevant DataUploader function datasetId = config.get(\"dataUploader\", \"datasetId\", fallback=None) datasetVersion", "\"Test data\", \"keywords\": [\"test\"], \"accessRights\": \"non-public\", \"objective\": \"Formålsbeskrivelse\", \"contactPoint\": {", "{}} datasetVersionEditionData = { \"edition\": \"2019-05-28T15:37:00+02:00\", \"description\": \"Data for one", "log.info(\"Done... 
go brew some coffee\") else: log.error(\"Could not upload file....\")", "\"edition\": \"2019-05-28T15:37:00+02:00\", \"description\": \"Data for one hour\", \"startTime\": \"2018-12-21T08:00:00+01:00\", \"endTime\":", "# To upload with curl: cmd = upload.curl(\"tmp3.zip\") # Max", "DataUploader function datasetId = config.get(\"dataUploader\", \"datasetId\", fallback=None) datasetVersion = config.get(\"dataUploader\",", "datasetVersionEdition is None: upload.createEdition(datasetVersionEditionData) log.info(f\"Dataset: {upload.datasetId}\") log.info(f\"Version: {upload.datasetVersion}\") log.info(f\"Edition: {upload.datasetVersionEdition}\")", "\"contactPoint\": { \"name\": \"Tim\", \"email\": \"<EMAIL>\", \"phone\": \"12345678\", }, \"publisher\":", "file....\") except Exception as e: log.exception(f\">> Something went horrible wrong:\\n{e}\")", "config.read(\"config.ini\") ##### # Datasets to be added to metadata API", "datasetVersionEditionData = { \"edition\": \"2019-05-28T15:37:00+02:00\", \"description\": \"Data for one hour\",", "config.get( \"dataUploader\", \"datasetVersionEdition\", fallback=None ) upload = DataUploader(config) try: log.info(\"Uploading", "\"name\": \"Tim\", \"email\": \"<EMAIL>\", \"phone\": \"12345678\", }, \"publisher\": \"Tim\", }", "# Datasets to be added to metadata API datasetData =", "\"publisher\": \"Tim\", } datasetVersionData = {\"version\": \"6\", \"schema\": {}, \"transformation\":", "Something went horrible wrong:\\n{e}\") # To upload with curl: cmd", "datasetVersionData = {\"version\": \"6\", \"schema\": {}, \"transformation\": {}} datasetVersionEditionData =", "The dataset* variables are optional, if these are set in", "datasetVersion is None: upload.createVersion(datasetVersionData) if datasetVersionEdition is None: upload.createEdition(datasetVersionEditionData) log.info(f\"Dataset:", "upload.createDataset(datasetData) if datasetVersion is None: upload.createVersion(datasetVersionData) if datasetVersionEdition is None:", 
"None: upload.createDataset(datasetData) if datasetVersion is None: upload.createVersion(datasetVersionData) if datasetVersionEdition is", "fallback=None) datasetVersion = config.get(\"dataUploader\", \"datasetVersion\", fallback=None) datasetVersionEdition = config.get( \"dataUploader\",", "some coffee\") else: log.error(\"Could not upload file....\") except Exception as", "logging.getLogger() config = ConfigParser() config.read(\"config.ini\") ##### # Datasets to be", "\"schema\": {}, \"transformation\": {}} datasetVersionEditionData = { \"edition\": \"2019-05-28T15:37:00+02:00\", \"description\":", "= config.get(\"dataUploader\", \"datasetId\", fallback=None) datasetVersion = config.get(\"dataUploader\", \"datasetVersion\", fallback=None) datasetVersionEdition", "from configparser import ConfigParser from sdk.data_uploader import DataUploader logging.basicConfig(level=logging.INFO) log", "with curl: cmd = upload.curl(\"tmp3.zip\") # Max upload size for", "\"email\": \"<EMAIL>\", \"phone\": \"12345678\", }, \"publisher\": \"Tim\", } datasetVersionData =", "is None: upload.createVersion(datasetVersionData) if datasetVersionEdition is None: upload.createEdition(datasetVersionEditionData) log.info(f\"Dataset: {upload.datasetId}\")", "\"Tim\", \"email\": \"<EMAIL>\", \"phone\": \"12345678\", }, \"publisher\": \"Tim\", } datasetVersionData", "variables are optional, if these are set in config.ini this", "relevant DataUploader function datasetId = config.get(\"dataUploader\", \"datasetId\", fallback=None) datasetVersion =", "coffee\") else: log.error(\"Could not upload file....\") except Exception as e:", "= ConfigParser() config.read(\"config.ini\") ##### # Datasets to be added to", "{upload.datasetVersionEdition}\") if upload.upload(\"README.md\"): log.info(\"Done... 
go brew some coffee\") else: log.error(\"Could", "to S3\") upload.login() if datasetId is None: upload.createDataset(datasetData) if datasetVersion", "{upload.datasetVersion}\") log.info(f\"Edition: {upload.datasetVersionEdition}\") if upload.upload(\"README.md\"): log.info(\"Done... go brew some coffee\")", "\"6\", \"schema\": {}, \"transformation\": {}} datasetVersionEditionData = { \"edition\": \"2019-05-28T15:37:00+02:00\",", "\"endTime\": \"2018-12-21T09:00:00+01:00\", } ###### # The dataset* variables are optional,", "\"description\": \"Data for one hour\", \"startTime\": \"2018-12-21T08:00:00+01:00\", \"endTime\": \"2018-12-21T09:00:00+01:00\", }", "set in config.ini this script will # not run the", "config.get(\"dataUploader\", \"datasetVersion\", fallback=None) datasetVersionEdition = config.get( \"dataUploader\", \"datasetVersionEdition\", fallback=None )", "log.info(f\"Dataset: {upload.datasetId}\") log.info(f\"Version: {upload.datasetVersion}\") log.info(f\"Edition: {upload.datasetVersionEdition}\") if upload.upload(\"README.md\"): log.info(\"Done... 
go", "are set in config.ini this script will # not run", "not run the relevant DataUploader function datasetId = config.get(\"dataUploader\", \"datasetId\",", "fallback=None ) upload = DataUploader(config) try: log.info(\"Uploading a file to", "horrible wrong:\\n{e}\") # To upload with curl: cmd = upload.curl(\"tmp3.zip\")", "fallback=None) datasetVersionEdition = config.get( \"dataUploader\", \"datasetVersionEdition\", fallback=None ) upload =", "as e: log.exception(f\">> Something went horrible wrong:\\n{e}\") # To upload", "except Exception as e: log.exception(f\">> Something went horrible wrong:\\n{e}\") #", "wrong:\\n{e}\") # To upload with curl: cmd = upload.curl(\"tmp3.zip\") #", "{ \"edition\": \"2019-05-28T15:37:00+02:00\", \"description\": \"Data for one hour\", \"startTime\": \"2018-12-21T08:00:00+01:00\",", "# not run the relevant DataUploader function datasetId = config.get(\"dataUploader\",", "this script will # not run the relevant DataUploader function", "{upload.datasetId}\") log.info(f\"Version: {upload.datasetVersion}\") log.info(f\"Edition: {upload.datasetVersionEdition}\") if upload.upload(\"README.md\"): log.info(\"Done... 
go brew", "\"startTime\": \"2018-12-21T08:00:00+01:00\", \"endTime\": \"2018-12-21T09:00:00+01:00\", } ###### # The dataset* variables", "datasetId = config.get(\"dataUploader\", \"datasetId\", fallback=None) datasetVersion = config.get(\"dataUploader\", \"datasetVersion\", fallback=None)", "is None: upload.createDataset(datasetData) if datasetVersion is None: upload.createVersion(datasetVersionData) if datasetVersionEdition", "from sdk.data_uploader import DataUploader logging.basicConfig(level=logging.INFO) log = logging.getLogger() config =", "\"2018-12-21T09:00:00+01:00\", } ###### # The dataset* variables are optional, if", "brew some coffee\") else: log.error(\"Could not upload file....\") except Exception", "import DataUploader logging.basicConfig(level=logging.INFO) log = logging.getLogger() config = ConfigParser() config.read(\"config.ini\")", "upload = DataUploader(config) try: log.info(\"Uploading a file to S3\") upload.login()", "sdk.data_uploader import DataUploader logging.basicConfig(level=logging.INFO) log = logging.getLogger() config = ConfigParser()", "\"description\": \"Test data\", \"keywords\": [\"test\"], \"accessRights\": \"non-public\", \"objective\": \"Formålsbeskrivelse\", \"contactPoint\":", "{ \"title\": \"Test\", \"description\": \"Test data\", \"keywords\": [\"test\"], \"accessRights\": \"non-public\",", "ConfigParser from sdk.data_uploader import DataUploader logging.basicConfig(level=logging.INFO) log = logging.getLogger() config", "file to S3\") upload.login() if datasetId is None: upload.createDataset(datasetData) if", "S3\") upload.login() if datasetId is None: upload.createDataset(datasetData) if datasetVersion is", "hour\", \"startTime\": \"2018-12-21T08:00:00+01:00\", \"endTime\": \"2018-12-21T09:00:00+01:00\", } ###### # The dataset*", "= DataUploader(config) try: log.info(\"Uploading a file to S3\") upload.login() if", "} ###### # The dataset* variables are optional, if these", "curl: cmd = upload.curl(\"tmp3.zip\") # Max 
upload size for now", "go brew some coffee\") else: log.error(\"Could not upload file....\") except", "if upload.upload(\"README.md\"): log.info(\"Done... go brew some coffee\") else: log.error(\"Could not", "if datasetVersion is None: upload.createVersion(datasetVersionData) if datasetVersionEdition is None: upload.createEdition(datasetVersionEditionData)", "# The dataset* variables are optional, if these are set", "\"transformation\": {}} datasetVersionEditionData = { \"edition\": \"2019-05-28T15:37:00+02:00\", \"description\": \"Data for", "= config.get(\"dataUploader\", \"datasetVersion\", fallback=None) datasetVersionEdition = config.get( \"dataUploader\", \"datasetVersionEdition\", fallback=None", "e: log.exception(f\">> Something went horrible wrong:\\n{e}\") # To upload with", "log.error(\"Could not upload file....\") except Exception as e: log.exception(f\">> Something", "went horrible wrong:\\n{e}\") # To upload with curl: cmd =", "}, \"publisher\": \"Tim\", } datasetVersionData = {\"version\": \"6\", \"schema\": {},", "= {\"version\": \"6\", \"schema\": {}, \"transformation\": {}} datasetVersionEditionData = {", "\"datasetId\", fallback=None) datasetVersion = config.get(\"dataUploader\", \"datasetVersion\", fallback=None) datasetVersionEdition = config.get(", "these are set in config.ini this script will # not", "Exception as e: log.exception(f\">> Something went horrible wrong:\\n{e}\") # To", "\"keywords\": [\"test\"], \"accessRights\": \"non-public\", \"objective\": \"Formålsbeskrivelse\", \"contactPoint\": { \"name\": \"Tim\",", "to be added to metadata API datasetData = { \"title\":", "are optional, if these are set in config.ini this script", "added to metadata API datasetData = { \"title\": \"Test\", \"description\":", "\"2018-12-21T08:00:00+01:00\", \"endTime\": \"2018-12-21T09:00:00+01:00\", } ###### # The dataset* variables are", "to metadata API datasetData = { \"title\": \"Test\", \"description\": \"Test", "cmd = upload.curl(\"tmp3.zip\") # 
Max upload size for now is", "be added to metadata API datasetData = { \"title\": \"Test\",", "config = ConfigParser() config.read(\"config.ini\") ##### # Datasets to be added", "data\", \"keywords\": [\"test\"], \"accessRights\": \"non-public\", \"objective\": \"Formålsbeskrivelse\", \"contactPoint\": { \"name\":", "run the relevant DataUploader function datasetId = config.get(\"dataUploader\", \"datasetId\", fallback=None)", "\"dataUploader\", \"datasetVersionEdition\", fallback=None ) upload = DataUploader(config) try: log.info(\"Uploading a", "\"Formålsbeskrivelse\", \"contactPoint\": { \"name\": \"Tim\", \"email\": \"<EMAIL>\", \"phone\": \"12345678\", },", "dataset* variables are optional, if these are set in config.ini", "script will # not run the relevant DataUploader function datasetId", "configparser import ConfigParser from sdk.data_uploader import DataUploader logging.basicConfig(level=logging.INFO) log =", "log.info(\"Uploading a file to S3\") upload.login() if datasetId is None:", "function datasetId = config.get(\"dataUploader\", \"datasetId\", fallback=None) datasetVersion = config.get(\"dataUploader\", \"datasetVersion\",", "datasetId is None: upload.createDataset(datasetData) if datasetVersion is None: upload.createVersion(datasetVersionData) if", "\"datasetVersionEdition\", fallback=None ) upload = DataUploader(config) try: log.info(\"Uploading a file", "ConfigParser() config.read(\"config.ini\") ##### # Datasets to be added to metadata", "in config.ini this script will # not run the relevant", "config.get(\"dataUploader\", \"datasetId\", fallback=None) datasetVersion = config.get(\"dataUploader\", \"datasetVersion\", fallback=None) datasetVersionEdition =", "API datasetData = { \"title\": \"Test\", \"description\": \"Test data\", \"keywords\":", "not upload file....\") except Exception as e: log.exception(f\">> Something went", "upload.createEdition(datasetVersionEditionData) log.info(f\"Dataset: {upload.datasetId}\") log.info(f\"Version: 
{upload.datasetVersion}\") log.info(f\"Edition: {upload.datasetVersionEdition}\") if upload.upload(\"README.md\"): log.info(\"Done...", "[\"test\"], \"accessRights\": \"non-public\", \"objective\": \"Formålsbeskrivelse\", \"contactPoint\": { \"name\": \"Tim\", \"email\":", "will # not run the relevant DataUploader function datasetId =", "metadata API datasetData = { \"title\": \"Test\", \"description\": \"Test data\",", "if datasetId is None: upload.createDataset(datasetData) if datasetVersion is None: upload.createVersion(datasetVersionData)", "\"Tim\", } datasetVersionData = {\"version\": \"6\", \"schema\": {}, \"transformation\": {}}", "{ \"name\": \"Tim\", \"email\": \"<EMAIL>\", \"phone\": \"12345678\", }, \"publisher\": \"Tim\",", "logging.basicConfig(level=logging.INFO) log = logging.getLogger() config = ConfigParser() config.read(\"config.ini\") ##### #", "datasetVersion = config.get(\"dataUploader\", \"datasetVersion\", fallback=None) datasetVersionEdition = config.get( \"dataUploader\", \"datasetVersionEdition\",", "log.info(f\"Version: {upload.datasetVersion}\") log.info(f\"Edition: {upload.datasetVersionEdition}\") if upload.upload(\"README.md\"): log.info(\"Done... go brew some", "upload.upload(\"README.md\"): log.info(\"Done... 
go brew some coffee\") else: log.error(\"Could not upload", "upload file....\") except Exception as e: log.exception(f\">> Something went horrible", "upload.login() if datasetId is None: upload.createDataset(datasetData) if datasetVersion is None:", "{}, \"transformation\": {}} datasetVersionEditionData = { \"edition\": \"2019-05-28T15:37:00+02:00\", \"description\": \"Data", "for one hour\", \"startTime\": \"2018-12-21T08:00:00+01:00\", \"endTime\": \"2018-12-21T09:00:00+01:00\", } ###### #", "= { \"edition\": \"2019-05-28T15:37:00+02:00\", \"description\": \"Data for one hour\", \"startTime\":", "DataUploader(config) try: log.info(\"Uploading a file to S3\") upload.login() if datasetId", "{\"version\": \"6\", \"schema\": {}, \"transformation\": {}} datasetVersionEditionData = { \"edition\":", "= config.get( \"dataUploader\", \"datasetVersionEdition\", fallback=None ) upload = DataUploader(config) try:", "upload with curl: cmd = upload.curl(\"tmp3.zip\") # Max upload size", "log.info(f\"Edition: {upload.datasetVersionEdition}\") if upload.upload(\"README.md\"): log.info(\"Done... 
go brew some coffee\") else:", "= upload.curl(\"tmp3.zip\") # Max upload size for now is 5GB", "To upload with curl: cmd = upload.curl(\"tmp3.zip\") # Max upload", ") upload = DataUploader(config) try: log.info(\"Uploading a file to S3\")", "if datasetVersionEdition is None: upload.createEdition(datasetVersionEditionData) log.info(f\"Dataset: {upload.datasetId}\") log.info(f\"Version: {upload.datasetVersion}\") log.info(f\"Edition:", "\"non-public\", \"objective\": \"Formålsbeskrivelse\", \"contactPoint\": { \"name\": \"Tim\", \"email\": \"<EMAIL>\", \"phone\":", "import ConfigParser from sdk.data_uploader import DataUploader logging.basicConfig(level=logging.INFO) log = logging.getLogger()", "} datasetVersionData = {\"version\": \"6\", \"schema\": {}, \"transformation\": {}} datasetVersionEditionData", "\"12345678\", }, \"publisher\": \"Tim\", } datasetVersionData = {\"version\": \"6\", \"schema\":", "= { \"title\": \"Test\", \"description\": \"Test data\", \"keywords\": [\"test\"], \"accessRights\":", "DataUploader logging.basicConfig(level=logging.INFO) log = logging.getLogger() config = ConfigParser() config.read(\"config.ini\") #####", "datasetData = { \"title\": \"Test\", \"description\": \"Test data\", \"keywords\": [\"test\"],", "\"title\": \"Test\", \"description\": \"Test data\", \"keywords\": [\"test\"], \"accessRights\": \"non-public\", \"objective\":", "upload.createVersion(datasetVersionData) if datasetVersionEdition is None: upload.createEdition(datasetVersionEditionData) log.info(f\"Dataset: {upload.datasetId}\") log.info(f\"Version: {upload.datasetVersion}\")", "None: upload.createEdition(datasetVersionEditionData) log.info(f\"Dataset: {upload.datasetId}\") log.info(f\"Version: {upload.datasetVersion}\") log.info(f\"Edition: {upload.datasetVersionEdition}\") if upload.upload(\"README.md\"):", "\"datasetVersion\", fallback=None) datasetVersionEdition = config.get( \"dataUploader\", \"datasetVersionEdition\", fallback=None ) upload", "optional, if 
these are set in config.ini this script will", "one hour\", \"startTime\": \"2018-12-21T08:00:00+01:00\", \"endTime\": \"2018-12-21T09:00:00+01:00\", } ###### # The", "\"Test\", \"description\": \"Test data\", \"keywords\": [\"test\"], \"accessRights\": \"non-public\", \"objective\": \"Formålsbeskrivelse\",", "###### # The dataset* variables are optional, if these are", "\"<EMAIL>\", \"phone\": \"12345678\", }, \"publisher\": \"Tim\", } datasetVersionData = {\"version\":", "Datasets to be added to metadata API datasetData = {", "import logging from configparser import ConfigParser from sdk.data_uploader import DataUploader", "\"accessRights\": \"non-public\", \"objective\": \"Formålsbeskrivelse\", \"contactPoint\": { \"name\": \"Tim\", \"email\": \"<EMAIL>\",", "\"objective\": \"Formålsbeskrivelse\", \"contactPoint\": { \"name\": \"Tim\", \"email\": \"<EMAIL>\", \"phone\": \"12345678\",", "a file to S3\") upload.login() if datasetId is None: upload.createDataset(datasetData)", "None: upload.createVersion(datasetVersionData) if datasetVersionEdition is None: upload.createEdition(datasetVersionEditionData) log.info(f\"Dataset: {upload.datasetId}\") log.info(f\"Version:", "datasetVersionEdition = config.get( \"dataUploader\", \"datasetVersionEdition\", fallback=None ) upload = DataUploader(config)" ]
[ "a sanbaiman # #################################################################### tiles = TilesConverter.string_to_136_array(man='22244466677788') win_tile = TilesConverter.string_to_136_array(man='7')[0]", "#################################################################### # we had to use all 14 tiles in", "had to use all 14 tiles in that array tiles", "= calculator.estimate_hand_value(tiles, win_tile) print_hand_result(result) #################################################################### # Tanyao hand by tsumo", "# #################################################################### # we had to use all 14 tiles", "# we had to use all 14 tiles in that", "TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles, win_tile) print_hand_result(result) #################################################################### # Tanyao hand", "import TilesConverter calculator = HandCalculator() # useful helper def print_hand_result(hand_result):", "# Change the cost of yaku # #################################################################### config =", "mahjong.hand_calculating.hand import HandCalculator from mahjong.meld import Meld from mahjong.hand_calculating.hand_config import", "import Meld from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules from mahjong.shanten import", "array tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result", "yaku # #################################################################### config = HandConfig(is_renhou=True) # renhou as an", "# useful helper def print_hand_result(hand_result): print(hand_result.han, hand_result.fu) print(hand_result.cost['main']) print(hand_result.yaku) for", "pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles, win_tile, config=config)", "calculator.estimate_hand_value(tiles, 
win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True))) print_hand_result(result) #################################################################### # Shanten calculation #", "by tsumo # #################################################################### result = calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True)) print_hand_result(result)", "win_tile = TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles, win_tile) print_hand_result(result) #################################################################### #", "melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True))) print_hand_result(result) #################################################################### # Shanten calculation # #################################################################### shanten", "Shanten from mahjong.tile import TilesConverter calculator = HandCalculator() # useful", "in that array tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile =", "] dora_indicators = [ TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], ] config", "= TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles,", "#################################################################### tiles = TilesConverter.string_to_136_array(man='22244466677788') win_tile = TilesConverter.string_to_136_array(man='7')[0] melds = [", "print(fu_item) print('') #################################################################### # Tanyao hand by ron # ####################################################################", "print(hand_result.han, hand_result.fu) print(hand_result.cost['main']) print(hand_result.yaku) 
for fu_item in hand_result.fu_details: print(fu_item) print('')", "calculator.estimate_hand_value(tiles, win_tile) print_hand_result(result) #################################################################### # Tanyao hand by tsumo #", "use all 14 tiles in that array tiles = TilesConverter.string_to_136_array(man='22444',", "win_tile, config=HandConfig(is_tsumo=True)) print_hand_result(result) #################################################################### # Add open set to hand", "# Kazoe as a sanbaiman # #################################################################### tiles = TilesConverter.string_to_136_array(man='22244466677788')", "cost of yaku # #################################################################### config = HandConfig(is_renhou=True) # renhou", "# #################################################################### shanten = Shanten() tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443')", "of yaku # #################################################################### config = HandConfig(is_renhou=True) # renhou as", "hand by tsumo # #################################################################### result = calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True))", "yakuman - old style config.yaku.renhou.han_closed = 13 tiles = TilesConverter.string_to_136_array(man='22444',", "as a sanbaiman # #################################################################### tiles = TilesConverter.string_to_136_array(man='22244466677788') win_tile =", "#################################################################### # Kazoe as a sanbaiman # #################################################################### tiles =", "#################################################################### # Shanten calculation # #################################################################### shanten = Shanten() tiles", "pin='123459', sou='443') result = shanten.calculate_shanten(tiles) print(result) 
#################################################################### # Kazoe as", "melds = [ Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False) ] dora_indicators = [", "# Tanyao hand by ron # #################################################################### # we had", "[Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))] result = calculator.estimate_hand_value(tiles, win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True))) print_hand_result(result) ####################################################################", "print(hand_result.yaku) for fu_item in hand_result.fu_details: print(fu_item) print('') #################################################################### # Tanyao", "config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN)) result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators,", "Shanten calculation # #################################################################### shanten = Shanten() tiles = TilesConverter.string_to_34_array(man='13569',", "import HandConfig, OptionalRules from mahjong.shanten import Shanten from mahjong.tile import", "tsumo # #################################################################### result = calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True)) print_hand_result(result) ####################################################################", "#################################################################### result = calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True)) print_hand_result(result) #################################################################### # Add", "= 13 tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0]", "# Add open set to hand # #################################################################### melds =", "# 
#################################################################### melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))] result = calculator.estimate_hand_value(tiles, win_tile,", "= [ TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], ] config = HandConfig(is_riichi=True,", "calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True)) print_hand_result(result) #################################################################### # Add open set to", "calculator = HandCalculator() # useful helper def print_hand_result(hand_result): print(hand_result.han, hand_result.fu)", "Shanten() tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443') result = shanten.calculate_shanten(tiles) print(result)", "hand by ron # #################################################################### # we had to use", "sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles, win_tile) print_hand_result(result) ####################################################################", "Meld from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules from mahjong.shanten import Shanten", "for fu_item in hand_result.fu_details: print(fu_item) print('') #################################################################### # Tanyao hand", "#################################################################### shanten = Shanten() tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443') result", "renhou as an yakuman - old style config.yaku.renhou.han_closed = 13", "open set to hand # #################################################################### melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))]", "an yakuman - old style 
config.yaku.renhou.han_closed = 13 tiles =", "= TilesConverter.string_to_136_array(man='22244466677788') win_tile = TilesConverter.string_to_136_array(man='7')[0] melds = [ Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'),", "win_tile, melds, dora_indicators, config) print_hand_result(result) #################################################################### # Change the cost", "def print_hand_result(hand_result): print(hand_result.han, hand_result.fu) print(hand_result.cost['main']) print(hand_result.yaku) for fu_item in hand_result.fu_details:", "mahjong.meld import Meld from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules from mahjong.shanten", "= [ Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False) ] dora_indicators = [ TilesConverter.string_to_136_array(man='1')[0],", "from mahjong.hand_calculating.hand import HandCalculator from mahjong.meld import Meld from mahjong.hand_calculating.hand_config", "mahjong.tile import TilesConverter calculator = HandCalculator() # useful helper def", "TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles, win_tile)", "hand_result.fu_details: print(fu_item) print('') #################################################################### # Tanyao hand by ron #", "we had to use all 14 tiles in that array", "hand_result.fu) print(hand_result.cost['main']) print(hand_result.yaku) for fu_item in hand_result.fu_details: print(fu_item) print('') ####################################################################", "to use all 14 tiles in that array tiles =", "TilesConverter.string_to_136_array(man='7')[0] melds = [ Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False) ] dora_indicators =", "[ Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False) ] dora_indicators = [ TilesConverter.string_to_136_array(man='1')[0], 
TilesConverter.string_to_136_array(man='1')[0],", "config=HandConfig(options=OptionalRules(has_open_tanyao=True))) print_hand_result(result) #################################################################### # Shanten calculation # #################################################################### shanten =", "tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result =", "#################################################################### # Tanyao hand by ron # #################################################################### # we", "pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles, win_tile) print_hand_result(result)", "TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443') result = shanten.calculate_shanten(tiles) print(result) #################################################################### # Kazoe", "melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))] result = calculator.estimate_hand_value(tiles, win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True)))", "win_tile = TilesConverter.string_to_136_array(man='7')[0] melds = [ Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False) ]", "print_hand_result(hand_result): print(hand_result.han, hand_result.fu) print(hand_result.cost['main']) print(hand_result.yaku) for fu_item in hand_result.fu_details: print(fu_item)", "= TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles, win_tile) print_hand_result(result) #################################################################### # Tanyao", "Tanyao hand by ron # #################################################################### # we had to", "TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], 
TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], ] config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN)) result", "print(result) #################################################################### # Kazoe as a sanbaiman # #################################################################### tiles", "= HandCalculator() # useful helper def print_hand_result(hand_result): print(hand_result.han, hand_result.fu) print(hand_result.cost['main'])", "TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], ] config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN)) result =", "] config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN)) result = calculator.estimate_hand_value(tiles, win_tile, melds,", "# #################################################################### config = HandConfig(is_renhou=True) # renhou as an yakuman", "= [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))] result = calculator.estimate_hand_value(tiles, win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True))) print_hand_result(result)", "TilesConverter.string_to_136_array(man='2222'), False) ] dora_indicators = [ TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0],", "print_hand_result(result) #################################################################### # Tanyao hand by tsumo # #################################################################### result", "# Shanten calculation # #################################################################### shanten = Shanten() tiles =", "OptionalRules from mahjong.shanten import Shanten from mahjong.tile import 
TilesConverter calculator", "result = calculator.estimate_hand_value(tiles, win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True))) print_hand_result(result) #################################################################### # Shanten", "in hand_result.fu_details: print(fu_item) print('') #################################################################### # Tanyao hand by ron", "config.yaku.renhou.han_closed = 13 tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile =", "HandConfig(is_renhou=True) # renhou as an yakuman - old style config.yaku.renhou.han_closed", "as an yakuman - old style config.yaku.renhou.han_closed = 13 tiles", "config) print_hand_result(result) #################################################################### # Change the cost of yaku #", "#################################################################### # Tanyao hand by tsumo # #################################################################### result =", "shanten.calculate_shanten(tiles) print(result) #################################################################### # Kazoe as a sanbaiman # ####################################################################", "options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN)) result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config) print_hand_result(result) ####################################################################", "sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles, win_tile, config=config) print_hand_result(result)", "sanbaiman # #################################################################### tiles = TilesConverter.string_to_136_array(man='22244466677788') win_tile = TilesConverter.string_to_136_array(man='7')[0] melds", "calculation # #################################################################### shanten = Shanten() tiles = 
TilesConverter.string_to_34_array(man='13569', pin='123459',", "the cost of yaku # #################################################################### config = HandConfig(is_renhou=True) #", "Add open set to hand # #################################################################### melds = [Meld(meld_type=Meld.PON,", "= calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config) print_hand_result(result) #################################################################### # Change", "HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN)) result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config) print_hand_result(result)", "calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config) print_hand_result(result) #################################################################### # Change the", "print_hand_result(result) #################################################################### # Add open set to hand # ####################################################################", "HandCalculator() # useful helper def print_hand_result(hand_result): print(hand_result.han, hand_result.fu) print(hand_result.cost['main']) print(hand_result.yaku)", "result = calculator.estimate_hand_value(tiles, win_tile) print_hand_result(result) #################################################################### # Tanyao hand by", "that array tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0]", "tiles=TilesConverter.string_to_136_array(man='444'))] result = calculator.estimate_hand_value(tiles, win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True))) print_hand_result(result) #################################################################### #", "HandConfig, OptionalRules from mahjong.shanten import Shanten from mahjong.tile import TilesConverter", "result = 
calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True)) print_hand_result(result) #################################################################### # Add open", "old style config.yaku.renhou.han_closed = 13 tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')", "TilesConverter.string_to_136_array(man='22244466677788') win_tile = TilesConverter.string_to_136_array(man='7')[0] melds = [ Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False)", "[ TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], ] config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN))", "TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], ] config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN)) result = calculator.estimate_hand_value(tiles,", "= Shanten() tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443') result = shanten.calculate_shanten(tiles)", "from mahjong.meld import Meld from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules from", "import Shanten from mahjong.tile import TilesConverter calculator = HandCalculator() #", "mahjong.shanten import Shanten from mahjong.tile import TilesConverter calculator = HandCalculator()", "by ron # #################################################################### # we had to use all", "from mahjong.tile import TilesConverter calculator = HandCalculator() # useful helper", "ron # #################################################################### # we had to use all 14", "dora_indicators = [ TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], ] 
config =", "dora_indicators, config) print_hand_result(result) #################################################################### # Change the cost of yaku", "Change the cost of yaku # #################################################################### config = HandConfig(is_renhou=True)", "style config.yaku.renhou.han_closed = 13 tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile", "import HandCalculator from mahjong.meld import Meld from mahjong.hand_calculating.hand_config import HandConfig,", "TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result = calculator.estimate_hand_value(tiles, win_tile,", "tiles in that array tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile", "set to hand # #################################################################### melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))] result", "- old style config.yaku.renhou.han_closed = 13 tiles = TilesConverter.string_to_136_array(man='22444', pin='333567',", "print('') #################################################################### # Tanyao hand by ron # #################################################################### #", "result = shanten.calculate_shanten(tiles) print(result) #################################################################### # Kazoe as a sanbaiman", "14 tiles in that array tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')", "HandCalculator from mahjong.meld import Meld from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules", "<gh_stars>100-1000 from mahjong.hand_calculating.hand import HandCalculator from mahjong.meld import Meld from", "#################################################################### # Add open set to hand # #################################################################### melds", "= 
HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN)) result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config)", "False) ] dora_indicators = [ TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], ]", "13 tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444') win_tile = TilesConverter.string_to_136_array(sou='4')[0] result", "melds, dora_indicators, config) print_hand_result(result) #################################################################### # Change the cost of", "#################################################################### # Change the cost of yaku # #################################################################### config", "# #################################################################### tiles = TilesConverter.string_to_136_array(man='22244466677788') win_tile = TilesConverter.string_to_136_array(man='7')[0] melds =", "to hand # #################################################################### melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))] result =", "# renhou as an yakuman - old style config.yaku.renhou.han_closed =", "= shanten.calculate_shanten(tiles) print(result) #################################################################### # Kazoe as a sanbaiman #", "Tanyao hand by tsumo # #################################################################### result = calculator.estimate_hand_value(tiles, win_tile,", "= TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443') result = shanten.calculate_shanten(tiles) print(result) #################################################################### #", "useful helper def print_hand_result(hand_result): print(hand_result.han, hand_result.fu) print(hand_result.cost['main']) print(hand_result.yaku) for fu_item", 
"print(hand_result.cost['main']) print(hand_result.yaku) for fu_item in hand_result.fu_details: print(fu_item) print('') #################################################################### #", "from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules from mahjong.shanten import Shanten from", "# Tanyao hand by tsumo # #################################################################### result = calculator.estimate_hand_value(tiles,", "Kazoe as a sanbaiman # #################################################################### tiles = TilesConverter.string_to_136_array(man='22244466677788') win_tile", "helper def print_hand_result(hand_result): print(hand_result.han, hand_result.fu) print(hand_result.cost['main']) print(hand_result.yaku) for fu_item in", "= calculator.estimate_hand_value(tiles, win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True))) print_hand_result(result) #################################################################### # Shanten calculation", "sou='443') result = shanten.calculate_shanten(tiles) print(result) #################################################################### # Kazoe as a", "Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False) ] dora_indicators = [ TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0], TilesConverter.string_to_136_array(man='1')[0],", "TilesConverter.string_to_136_array(man='1')[0], ] config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN)) result = calculator.estimate_hand_value(tiles, win_tile,", "config = HandConfig(is_renhou=True) # renhou as an yakuman - old", "#################################################################### melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))] result = calculator.estimate_hand_value(tiles, win_tile, melds=melds,", "TilesConverter calculator = HandCalculator() # useful helper def 
print_hand_result(hand_result): print(hand_result.han,", "print_hand_result(result) #################################################################### # Shanten calculation # #################################################################### shanten = Shanten()", "fu_item in hand_result.fu_details: print(fu_item) print('') #################################################################### # Tanyao hand by", "win_tile) print_hand_result(result) #################################################################### # Tanyao hand by tsumo # ####################################################################", "hand # #################################################################### melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))] result = calculator.estimate_hand_value(tiles,", "win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True))) print_hand_result(result) #################################################################### # Shanten calculation # ####################################################################", "tiles = TilesConverter.string_to_136_array(man='22244466677788') win_tile = TilesConverter.string_to_136_array(man='7')[0] melds = [ Meld(Meld.KAN,", "shanten = Shanten() tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443') result =", "= TilesConverter.string_to_136_array(man='7')[0] melds = [ Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False) ] dora_indicators", "result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config) print_hand_result(result) #################################################################### #", "= calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True)) print_hand_result(result) #################################################################### # Add open set", "from mahjong.shanten import Shanten from mahjong.tile import TilesConverter calculator 
=", "tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443') result = shanten.calculate_shanten(tiles) print(result) ####################################################################", "# #################################################################### result = calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True)) print_hand_result(result) #################################################################### #", "all 14 tiles in that array tiles = TilesConverter.string_to_136_array(man='22444', pin='333567',", "#################################################################### config = HandConfig(is_renhou=True) # renhou as an yakuman -", "print_hand_result(result) #################################################################### # Change the cost of yaku # ####################################################################", "mahjong.hand_calculating.hand_config import HandConfig, OptionalRules from mahjong.shanten import Shanten from mahjong.tile", "= HandConfig(is_renhou=True) # renhou as an yakuman - old style", "config=HandConfig(is_tsumo=True)) print_hand_result(result) #################################################################### # Add open set to hand #" ]
[ "def _get_entry_format(self, entry): return entry.get_format_solaris() def mount_entry(self, entry): args =", "return entry.get_format_solaris() def mount_entry(self, entry): args = [\"-F\", entry.get_typename(), entry.get_fsname(),", "def mount_entry(self, entry): args = [\"-F\", entry.get_typename(), entry.get_fsname(), entry.get_dirname()] args.extend(self._format_options(entry))", "entry.get_format_solaris() def mount_entry(self, entry): args = [\"-F\", entry.get_typename(), entry.get_fsname(), entry.get_dirname()]", "import MounterMixin, execute_mount class SolarisMounterMixin(MounterMixin): def _get_fstab_path(self): return \"/etc/fstab\" def", "SolarisMounterMixin(MounterMixin): def _get_fstab_path(self): return \"/etc/fstab\" def _get_entry_format(self, entry): return entry.get_format_solaris()", "from ..base.mounter import MounterMixin, execute_mount class SolarisMounterMixin(MounterMixin): def _get_fstab_path(self): return", "class SolarisMounterMixin(MounterMixin): def _get_fstab_path(self): return \"/etc/fstab\" def _get_entry_format(self, entry): return", "_get_fstab_path(self): return \"/etc/fstab\" def _get_entry_format(self, entry): return entry.get_format_solaris() def mount_entry(self,", "return \"/etc/fstab\" def _get_entry_format(self, entry): return entry.get_format_solaris() def mount_entry(self, entry):", "_get_entry_format(self, entry): return entry.get_format_solaris() def mount_entry(self, entry): args = [\"-F\",", "mount_entry(self, entry): args = [\"-F\", entry.get_typename(), entry.get_fsname(), entry.get_dirname()] args.extend(self._format_options(entry)) execute_mount(args)", "execute_mount class SolarisMounterMixin(MounterMixin): def _get_fstab_path(self): return \"/etc/fstab\" def _get_entry_format(self, entry):", "MounterMixin, execute_mount class SolarisMounterMixin(MounterMixin): def _get_fstab_path(self): return \"/etc/fstab\" def _get_entry_format(self,", "..base.mounter import MounterMixin, execute_mount class 
SolarisMounterMixin(MounterMixin): def _get_fstab_path(self): return \"/etc/fstab\"", "\"/etc/fstab\" def _get_entry_format(self, entry): return entry.get_format_solaris() def mount_entry(self, entry): args", "def _get_fstab_path(self): return \"/etc/fstab\" def _get_entry_format(self, entry): return entry.get_format_solaris() def", "entry): return entry.get_format_solaris() def mount_entry(self, entry): args = [\"-F\", entry.get_typename()," ]
[ "database_management_config=None, database_version=None, db_id=None, db_packs=None, db_unique_name=None, defined_tags=None, display_name=None, external_non_container_database_id=None, freeform_tags=None, id=None,", "db_packs=None, db_unique_name=None, defined_tags=None, display_name=None, external_non_container_database_id=None, freeform_tags=None, id=None, lifecycle_details=None, ncharacter_set=None, operations_insights_config=None,", "@pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" The date and time", "[OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure external database resource. \"\"\"", "isinstance(id, str): raise TypeError(\"Expected argument 'id' to be a str\")", "dict\") pulumi.set(__self__, \"database_management_config\", database_management_config) if database_version and not isinstance(database_version, str):", "time_created=None, time_zone=None): if character_set and not isinstance(character_set, str): raise TypeError(\"Expected", "str: \"\"\" The Oracle Database ID, which identifies an Oracle", "def db_packs(self) -> str: \"\"\" The database packs licensed for", "resource. Each key is predefined and scoped to a namespace.", "to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). \"\"\"", "def database_edition(self) -> str: \"\"\" The Oracle Database edition. 
\"\"\"", "@pulumi.getter(name=\"externalNonContainerDatabaseId\") def external_non_container_database_id(self) -> str: return pulumi.get(self, \"external_non_container_database_id\") @property @pulumi.getter(name=\"freeformTags\")", "return pulumi.get(self, \"external_non_container_database_id\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]:", "specific External Non Container Database resource in Oracle Cloud Infrastructure", "external_non_container_database_id=None, freeform_tags=None, id=None, lifecycle_details=None, ncharacter_set=None, operations_insights_config=None, state=None, time_created=None, time_zone=None): if", "outside of Oracle Cloud. \"\"\" return pulumi.get(self, \"db_id\") @property @pulumi.getter(name=\"dbPacks\")", "Management service. \"\"\" return pulumi.get(self, \"database_management_config\") @property @pulumi.getter(name=\"databaseVersion\") def database_version(self)", "db_packs=self.db_packs, db_unique_name=self.db_unique_name, defined_tags=self.defined_tags, display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags, id=self.id, lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config,", "Tool. *** # *** Do not edit by hand unless", "isinstance(state, str): raise TypeError(\"Expected argument 'state' to be a str\")", "be a str\") pulumi.set(__self__, \"state\", state) if time_created and not", "TypeError(\"Expected argument 'database_edition' to be a str\") pulumi.set(__self__, \"database_edition\", database_edition)", "scoped to a namespace. 
For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).", "\"time_zone\", time_zone) @property @pulumi.getter(name=\"characterSet\") def character_set(self) -> str: \"\"\" The", "argument 'db_id' to be a str\") pulumi.set(__self__, \"db_id\", db_id) if", "for the external Oracle Database. \"\"\" return pulumi.get(self, \"db_packs\") @property", "pulumi.set(__self__, \"database_edition\", database_edition) if database_management_config and not isinstance(database_management_config, dict): raise", "str): raise TypeError(\"Expected argument 'database_configuration' to be a str\") pulumi.set(__self__,", "str): raise TypeError(\"Expected argument 'compartment_id' to be a str\") pulumi.set(__self__,", "a str\") pulumi.set(__self__, \"ncharacter_set\", ncharacter_set) if operations_insights_config and not isinstance(operations_insights_config,", "not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to be a", "The user-friendly name for the external database. The name does", "be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if database_configuration and not", "freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags for this resource.", "raise TypeError(\"Expected argument 'db_packs' to be a str\") pulumi.set(__self__, \"db_packs\",", "character_set(self) -> str: \"\"\" The character set of the external", "name, type, or namespace. 
For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).", "False: yield self return GetExternalNonContainerDatabaseResult( character_set=self.character_set, compartment_id=self.compartment_id, database_configuration=self.database_configuration, database_edition=self.database_edition, database_management_config=self.database_management_config,", "database_management_config=self.database_management_config, database_version=self.database_version, db_id=self.db_id, db_packs=self.db_packs, db_unique_name=self.db_unique_name, defined_tags=self.defined_tags, display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags, id=self.id,", "None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version =", "str\") pulumi.set(__self__, \"external_non_container_database_id\", external_non_container_database_id) if freeform_tags and not isinstance(freeform_tags, dict):", "argument 'lifecycle_details' to be a str\") pulumi.set(__self__, \"lifecycle_details\", lifecycle_details) if", "database_version) if db_id and not isinstance(db_id, str): raise TypeError(\"Expected argument", "pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" The", "be a str\") pulumi.set(__self__, \"id\", id) if lifecycle_details and not", "a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if database_configuration and not isinstance(database_configuration,", "def time_created(self) -> str: \"\"\" The date and time the", "\"\"\" The time zone of the external database. 
It is", "\"freeform_tags\", freeform_tags) if id and not isinstance(id, str): raise TypeError(\"Expected", "return GetExternalNonContainerDatabaseResult( character_set=self.character_set, compartment_id=self.compartment_id, database_configuration=self.database_configuration, database_edition=self.database_edition, database_management_config=self.database_management_config, database_version=self.database_version, db_id=self.db_id, db_packs=self.db_packs,", "a specific external non-container database. ## Example Usage ```python import", "\"ncharacter_set\") @property @pulumi.getter(name=\"operationsInsightsConfig\") def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': \"\"\" The configuration", "non-container database. ## Example Usage ```python import pulumi import pulumi_oci", "get_external_non_container_database(external_non_container_database_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExternalNonContainerDatabaseResult:", "if database_edition and not isinstance(database_edition, str): raise TypeError(\"Expected argument 'database_edition'", "defined_tags=self.defined_tags, display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags, id=self.id, lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config, state=self.state, time_created=self.time_created,", "if opts is None: opts = pulumi.InvokeOptions() if opts.version is", "time zone offset (a character type in the format '[+|-]TZH:TZM')", "be a str\") pulumi.set(__self__, \"db_packs\", db_packs) if db_unique_name and not", "in Oracle Cloud Infrastructure Database service. 
Gets information about a", "\"\"\" return pulumi.get(self, \"database_management_config\") @property @pulumi.getter(name=\"databaseVersion\") def database_version(self) -> str:", "Cloud Infrastructure external database resource. \"\"\" return pulumi.get(self, \"id\") @property", "time_zone) @property @pulumi.getter(name=\"characterSet\") def character_set(self) -> str: \"\"\" The character", "'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if display_name", "\"freeform_tags\") @property @pulumi.getter def id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm)", "\"db_id\", db_id) if db_packs and not isinstance(db_packs, str): raise TypeError(\"Expected", "db_packs(self) -> str: \"\"\" The database packs licensed for the", "pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence,", "'database_configuration' to be a str\") pulumi.set(__self__, \"database_configuration\", database_configuration) if database_edition", "and not isinstance(database_version, str): raise TypeError(\"Expected argument 'database_version' to be", "not isinstance(time_zone, str): raise TypeError(\"Expected argument 'time_zone' to be a", "pulumi.set(__self__, \"display_name\", display_name) if external_non_container_database_id and not isinstance(external_non_container_database_id, str): raise", "def character_set(self) -> str: \"\"\" The character set of the", "the external database. \"\"\" return pulumi.get(self, \"character_set\") @property @pulumi.getter(name=\"compartmentId\") def", "the external database. 
\"\"\" return pulumi.get(self, \"ncharacter_set\") @property @pulumi.getter(name=\"operationsInsightsConfig\") def", "to be a dict\") pulumi.set(__self__, \"operations_insights_config\", operations_insights_config) if state and", "time_created and not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to", "disable=using-constant-test def __await__(self): if False: yield self return GetExternalNonContainerDatabaseResult( character_set=self.character_set,", "a str\") pulumi.set(__self__, \"time_zone\", time_zone) @property @pulumi.getter(name=\"characterSet\") def character_set(self) ->", "if db_id and not isinstance(db_id, str): raise TypeError(\"Expected argument 'db_id'", "\"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"lifecycleDetails\") def lifecycle_details(self) -> str:", "lifecycle_details(self) -> str: \"\"\" Additional information about the current lifecycle", "@property @pulumi.getter(name=\"databaseVersion\") def database_version(self) -> str: \"\"\" The Oracle Database", "pulumi.get(self, \"character_set\") @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The", "str: return pulumi.get(self, \"external_non_container_database_id\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str,", "to be a str\") pulumi.set(__self__, \"time_created\", time_created) if time_zone and", "state=self.state, time_created=self.time_created, time_zone=self.time_zone) def get_external_non_container_database(external_non_container_database_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions]", "\"display_name\", display_name) if external_non_container_database_id and not isinstance(external_non_container_database_id, str): raise TypeError(\"Expected", "external_non_container_database_id and not isinstance(external_non_container_database_id, str): raise TypeError(\"Expected argument 'external_non_container_database_id' to", 
"character_set=self.character_set, compartment_id=self.compartment_id, database_configuration=self.database_configuration, database_edition=self.database_edition, database_management_config=self.database_management_config, database_version=self.database_version, db_id=self.db_id, db_packs=self.db_packs, db_unique_name=self.db_unique_name, defined_tags=self.defined_tags,", "isinstance(external_non_container_database_id, str): raise TypeError(\"Expected argument 'external_non_container_database_id' to be a str\")", "\"database_version\") @property @pulumi.getter(name=\"dbId\") def db_id(self) -> str: \"\"\" The Oracle", "compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config, database_version=__ret__.database_version, db_id=__ret__.db_id, db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags, display_name=__ret__.display_name,", "Usage ```python import pulumi import pulumi_oci as oci test_external_non_container_database =", "character_set and not isinstance(character_set, str): raise TypeError(\"Expected argument 'character_set' to", "pulumi.set(__self__, \"character_set\", character_set) if compartment_id and not isinstance(compartment_id, str): raise", "lifecycle state. \"\"\" return pulumi.get(self, \"lifecycle_details\") @property @pulumi.getter(name=\"ncharacterSet\") def ncharacter_set(self)", "raise TypeError(\"Expected argument 'character_set' to be a str\") pulumi.set(__self__, \"character_set\",", "_utilities from . import outputs __all__ = [ 'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult',", "the compartment. 
\"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"databaseConfiguration\") def database_configuration(self)", "def database_configuration(self) -> str: \"\"\" The Oracle Database configuration \"\"\"", "\"db_packs\", db_packs) if db_unique_name and not isinstance(db_unique_name, str): raise TypeError(\"Expected", "compartment_id=self.compartment_id, database_configuration=self.database_configuration, database_edition=self.database_edition, database_management_config=self.database_management_config, database_version=self.database_version, db_id=self.db_id, db_packs=self.db_packs, db_unique_name=self.db_unique_name, defined_tags=self.defined_tags, display_name=self.display_name,", "str: \"\"\" The national character of the external database. \"\"\"", "argument 'external_non_container_database_id' to be a str\") pulumi.set(__self__, \"external_non_container_database_id\", external_non_container_database_id) if", "def __init__(__self__, character_set=None, compartment_id=None, database_configuration=None, database_edition=None, database_management_config=None, database_version=None, db_id=None, db_packs=None,", "resource. Each tag is a simple key-value pair with no", "@pulumi.getter(name=\"databaseEdition\") def database_edition(self) -> str: \"\"\" The Oracle Database edition.", "'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database', ] @pulumi.output_type class GetExternalNonContainerDatabaseResult: \"\"\" A collection of", "if db_packs and not isinstance(db_packs, str): raise TypeError(\"Expected argument 'db_packs'", "'db_packs' to be a str\") pulumi.set(__self__, \"db_packs\", db_packs) if db_unique_name", "to be a str\") pulumi.set(__self__, \"db_packs\", db_packs) if db_unique_name and", "The configuration of the Database Management service. 
\"\"\" return pulumi.get(self,", "\"\"\" The current state of the Oracle Cloud Infrastructure external", "str: \"\"\" The date and time the database was created.", "argument 'character_set' to be a str\") pulumi.set(__self__, \"character_set\", character_set) if", "TypeError(\"Expected argument 'database_management_config' to be a dict\") pulumi.set(__self__, \"database_management_config\", database_management_config)", "'time_zone' to be a str\") pulumi.set(__self__, \"time_zone\", time_zone) @property @pulumi.getter(name=\"characterSet\")", "`DB_UNIQUE_NAME` of the external database. \"\"\" return pulumi.get(self, \"db_unique_name\") @property", "```python import pulumi import pulumi_oci as oci test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"])", "TypeError(\"Expected argument 'state' to be a str\") pulumi.set(__self__, \"state\", state)", "'database_edition' to be a str\") pulumi.set(__self__, \"database_edition\", database_edition) if database_management_config", "a str\") pulumi.set(__self__, \"id\", id) if lifecycle_details and not isinstance(lifecycle_details,", "of Oracle Cloud. \"\"\" return pulumi.get(self, \"db_id\") @property @pulumi.getter(name=\"dbPacks\") def", "\"db_id\") @property @pulumi.getter(name=\"dbPacks\") def db_packs(self) -> str: \"\"\" The database", "if False: yield self return GetExternalNonContainerDatabaseResult( character_set=self.character_set, compartment_id=self.compartment_id, database_configuration=self.database_configuration, database_edition=self.database_edition,", "external_non_container_database_id: The external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). 
\"\"\" __args__ = dict()", "@pulumi.getter(name=\"operationsInsightsConfig\") def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': \"\"\" The configuration of Operations", "The Oracle Database ID, which identifies an Oracle Database located", "generated by the Pulumi Terraform Bridge (tfgen) Tool. *** #", "\"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment. \"\"\" return pulumi.get(self, \"compartment_id\")", "-> str: \"\"\" The database packs licensed for the external", "\"time_created\", time_created) if time_zone and not isinstance(time_zone, str): raise TypeError(\"Expected", "zone region name, depending on how the time zone value", "The national character of the external database. \"\"\" return pulumi.get(self,", "pulumi.get(self, \"ncharacter_set\") @property @pulumi.getter(name=\"operationsInsightsConfig\") def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': \"\"\" The", "@pulumi.getter(name=\"timeZone\") def time_zone(self) -> str: \"\"\" The time zone of", "licensed for the external Oracle Database. \"\"\" return pulumi.get(self, \"db_packs\")", "information about a specific external non-container database. ## Example Usage", "character_set=None, compartment_id=None, database_configuration=None, database_edition=None, database_management_config=None, database_version=None, db_id=None, db_packs=None, db_unique_name=None, defined_tags=None,", "\"\"\" A collection of values returned by getExternalNonContainerDatabase. \"\"\" def", "of the external database. \"\"\" return pulumi.get(self, \"character_set\") @property @pulumi.getter(name=\"compartmentId\")", "tag is a simple key-value pair with no predefined name,", "The Oracle Database edition. 
\"\"\" return pulumi.get(self, \"database_edition\") @property @pulumi.getter(name=\"databaseManagementConfig\")", "The configuration of Operations Insights for the external database \"\"\"", "and not isinstance(db_id, str): raise TypeError(\"Expected argument 'db_id' to be", "to be unique. \"\"\" return pulumi.get(self, \"display_name\") @property @pulumi.getter(name=\"externalNonContainerDatabaseId\") def", "configuration of Operations Insights for the external database \"\"\" return", "isinstance(display_name, str): raise TypeError(\"Expected argument 'display_name' to be a str\")", "-> str: \"\"\" The Oracle Database configuration \"\"\" return pulumi.get(self,", "pulumi.get(self, \"time_zone\") class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): # pylint: disable=using-constant-test def __await__(self): if", "TypeError(\"Expected argument 'time_zone' to be a str\") pulumi.set(__self__, \"time_zone\", time_zone)", "@property @pulumi.getter(name=\"dbPacks\") def db_packs(self) -> str: \"\"\" The database packs", "time zone value was specified when the database was created", "not isinstance(id, str): raise TypeError(\"Expected argument 'id' to be a", "data source provides details about a specific External Non Container", "last altered. \"\"\" return pulumi.get(self, \"time_zone\") class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): # pylint:", "pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"databaseConfiguration\") def database_configuration(self) -> str: \"\"\" The", "external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). 
\"\"\" __args__ = dict() __args__['externalNonContainerDatabaseId'] =", "= None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExternalNonContainerDatabaseResult: \"\"\" This", "ncharacter_set) if operations_insights_config and not isinstance(operations_insights_config, dict): raise TypeError(\"Expected argument", "configuration \"\"\" return pulumi.get(self, \"database_configuration\") @property @pulumi.getter(name=\"databaseEdition\") def database_edition(self) ->", "def lifecycle_details(self) -> str: \"\"\" Additional information about the current", "if defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags'", "\"\"\" return pulumi.get(self, \"character_set\") @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str:", "is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts,", "str: \"\"\" The user-friendly name for the external database. The", "ncharacter_set and not isinstance(ncharacter_set, str): raise TypeError(\"Expected argument 'ncharacter_set' to", "packs licensed for the external Oracle Database. 
\"\"\" return pulumi.get(self,", "raise TypeError(\"Expected argument 'display_name' to be a str\") pulumi.set(__self__, \"display_name\",", "'database_version' to be a str\") pulumi.set(__self__, \"database_version\", database_version) if db_id", "and not isinstance(ncharacter_set, str): raise TypeError(\"Expected argument 'ncharacter_set' to be", "character_set=__ret__.character_set, compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config, database_version=__ret__.database_version, db_id=__ret__.db_id, db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags,", "id=None, lifecycle_details=None, ncharacter_set=None, operations_insights_config=None, state=None, time_created=None, time_zone=None): if character_set and", "raise TypeError(\"Expected argument 'database_edition' to be a str\") pulumi.set(__self__, \"database_edition\",", "database. The name does not have to be unique. 
\"\"\"", "external database \"\"\" return pulumi.get(self, \"operations_insights_config\") @property @pulumi.getter def state(self)", "pulumi.get(self, \"id\") @property @pulumi.getter(name=\"lifecycleDetails\") def lifecycle_details(self) -> str: \"\"\" Additional", "\"lifecycle_details\", lifecycle_details) if ncharacter_set and not isinstance(ncharacter_set, str): raise TypeError(\"Expected", "to be a str\") pulumi.set(__self__, \"db_unique_name\", db_unique_name) if defined_tags and", "hand unless you're certain you know what you are doing!", "not isinstance(external_non_container_database_id, str): raise TypeError(\"Expected argument 'external_non_container_database_id' to be a", "str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure external", "def db_unique_name(self) -> str: \"\"\" The `DB_UNIQUE_NAME` of the external", "was created. \"\"\" return pulumi.get(self, \"time_created\") @property @pulumi.getter(name=\"timeZone\") def time_zone(self)", "Oracle Database. \"\"\" return pulumi.get(self, \"db_packs\") @property @pulumi.getter(name=\"dbUniqueName\") def db_unique_name(self)", "oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"]) ``` :param str external_non_container_database_id: The external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm).", "source provides details about a specific External Non Container Database", "on how the time zone value was specified when the", "import _utilities from . 
import outputs __all__ = [ 'GetExternalNonContainerDatabaseResult',", "'get_external_non_container_database', ] @pulumi.output_type class GetExternalNonContainerDatabaseResult: \"\"\" A collection of values", "argument 'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if", "The database packs licensed for the external Oracle Database. \"\"\"", "return pulumi.get(self, \"database_management_config\") @property @pulumi.getter(name=\"databaseVersion\") def database_version(self) -> str: \"\"\"", "-> Mapping[str, Any]: \"\"\" Defined tags for this resource. Each", "\"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure external database", "not isinstance(ncharacter_set, str): raise TypeError(\"Expected argument 'ncharacter_set' to be a", "return pulumi.get(self, \"time_created\") @property @pulumi.getter(name=\"timeZone\") def time_zone(self) -> str: \"\"\"", "a dict\") pulumi.set(__self__, \"operations_insights_config\", operations_insights_config) if state and not isinstance(state,", "a str\") pulumi.set(__self__, \"external_non_container_database_id\", external_non_container_database_id) if freeform_tags and not isinstance(freeform_tags,", "unique. \"\"\" return pulumi.get(self, \"display_name\") @property @pulumi.getter(name=\"externalNonContainerDatabaseId\") def external_non_container_database_id(self) ->", "unless you're certain you know what you are doing! ***", "@property @pulumi.getter(name=\"databaseEdition\") def database_edition(self) -> str: \"\"\" The Oracle Database", "was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***", "to be a str\") pulumi.set(__self__, \"time_zone\", time_zone) @property @pulumi.getter(name=\"characterSet\") def", "database. \"\"\" return pulumi.get(self, \"db_unique_name\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) ->", "key is predefined and scoped to a namespace. 
For more", "the external database. The name does not have to be", "and time the database was created. \"\"\" return pulumi.get(self, \"time_created\")", "= None) -> AwaitableGetExternalNonContainerDatabaseResult: \"\"\" This data source provides details", "AwaitableGetExternalNonContainerDatabaseResult: \"\"\" This data source provides details about a specific", "TypeError(\"Expected argument 'db_packs' to be a str\") pulumi.set(__self__, \"db_packs\", db_packs)", "if time_created and not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created'", "class GetExternalNonContainerDatabaseResult: \"\"\" A collection of values returned by getExternalNonContainerDatabase.", "isinstance(character_set, str): raise TypeError(\"Expected argument 'character_set' to be a str\")", "Container Database resource in Oracle Cloud Infrastructure Database service. Gets", "Non Container Database resource in Oracle Cloud Infrastructure Database service.", "be a str\") pulumi.set(__self__, \"db_id\", db_id) if db_packs and not", "oci test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"]) ``` :param str external_non_container_database_id: The external", ". import outputs __all__ = [ 'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database', ]", "current lifecycle state. \"\"\" return pulumi.get(self, \"lifecycle_details\") @property @pulumi.getter(name=\"ncharacterSet\") def", "external database. 
\"\"\" return pulumi.get(self, \"ncharacter_set\") @property @pulumi.getter(name=\"operationsInsightsConfig\") def operations_insights_config(self)", "external_non_container_database_id(self) -> str: return pulumi.get(self, \"external_non_container_database_id\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self)", "of the Oracle Cloud Infrastructure external database resource. \"\"\" return", "opts is None: opts = pulumi.InvokeOptions() if opts.version is None:", "to be a str\") pulumi.set(__self__, \"lifecycle_details\", lifecycle_details) if ncharacter_set and", "isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to be a str\")", "def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags for this", "Oracle Cloud Infrastructure external database resource. \"\"\" return pulumi.get(self, \"state\")", "defined_tags=__ret__.defined_tags, display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id, freeform_tags=__ret__.freeform_tags, id=__ret__.id, lifecycle_details=__ret__.lifecycle_details, ncharacter_set=__ret__.ncharacter_set, operations_insights_config=__ret__.operations_insights_config, state=__ret__.state, time_created=__ret__.time_created,", "db_unique_name and not isinstance(db_unique_name, str): raise TypeError(\"Expected argument 'db_unique_name' to", "The Oracle Database version. \"\"\" return pulumi.get(self, \"database_version\") @property @pulumi.getter(name=\"dbId\")", "by the Pulumi Terraform Bridge (tfgen) Tool. 
*** # ***", "which identifies an Oracle Database located outside of Oracle Cloud.", "is a time zone offset (a character type in the", "pulumi.set(__self__, \"operations_insights_config\", operations_insights_config) if state and not isinstance(state, str): raise", "argument 'display_name' to be a str\") pulumi.set(__self__, \"display_name\", display_name) if", "\"\"\" The database packs licensed for the external Oracle Database.", "'display_name' to be a str\") pulumi.set(__self__, \"display_name\", display_name) if external_non_container_database_id", "isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to be a dict\")", "Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not", "by getExternalNonContainerDatabase. \"\"\" def __init__(__self__, character_set=None, compartment_id=None, database_configuration=None, database_edition=None, database_management_config=None,", "pulumi.set(__self__, \"freeform_tags\", freeform_tags) if id and not isinstance(id, str): raise", "dict): raise TypeError(\"Expected argument 'operations_insights_config' to be a dict\") pulumi.set(__self__,", "and not isinstance(time_zone, str): raise TypeError(\"Expected argument 'time_zone' to be", "lifecycle_details and not isinstance(lifecycle_details, str): raise TypeError(\"Expected argument 'lifecycle_details' to", "of the external database. 
It is a time zone offset", "@pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the", "AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self", "\"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" The date", "import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union,", "type in the format '[+|-]TZH:TZM') or a time zone region", "database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config, database_version=__ret__.database_version, db_id=__ret__.db_id, db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags, display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id,", "what you are doing! *** import warnings import pulumi import", "str: \"\"\" The time zone of the external database. It", "getExternalNonContainerDatabase. \"\"\" def __init__(__self__, character_set=None, compartment_id=None, database_configuration=None, database_edition=None, database_management_config=None, database_version=None,", "-> Mapping[str, Any]: \"\"\" Free-form tags for this resource. Each", "have to be unique. 
\"\"\" return pulumi.get(self, \"display_name\") @property @pulumi.getter(name=\"externalNonContainerDatabaseId\")", "\"\"\" return pulumi.get(self, \"ncharacter_set\") @property @pulumi.getter(name=\"operationsInsightsConfig\") def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult':", "= _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value return AwaitableGetExternalNonContainerDatabaseResult(", "lifecycle_details) if ncharacter_set and not isinstance(ncharacter_set, str): raise TypeError(\"Expected argument", "about a specific external non-container database. ## Example Usage ```python", "= pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value return AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set, compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration,", "def display_name(self) -> str: \"\"\" The user-friendly name for the", "and not isinstance(display_name, str): raise TypeError(\"Expected argument 'display_name' to be", "time the database was created. 
\"\"\" return pulumi.get(self, \"time_created\") @property", "details about a specific External Non Container Database resource in", "typ=GetExternalNonContainerDatabaseResult).value return AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set, compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config, database_version=__ret__.database_version, db_id=__ret__.db_id,", "db_packs and not isinstance(db_packs, str): raise TypeError(\"Expected argument 'db_packs' to", "a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if id and not isinstance(id,", "pulumi.set(__self__, \"time_zone\", time_zone) @property @pulumi.getter(name=\"characterSet\") def character_set(self) -> str: \"\"\"", "pulumi.get(self, \"database_version\") @property @pulumi.getter(name=\"dbId\") def db_id(self) -> str: \"\"\" The", "with no predefined name, type, or namespace. For more information,", "opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version()", "be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if id and not", "str: \"\"\" The current state of the Oracle Cloud Infrastructure", "def ncharacter_set(self) -> str: \"\"\" The national character of the", "\"\"\" The `DB_UNIQUE_NAME` of the external database. \"\"\" return pulumi.get(self,", "the database was created / last altered. \"\"\" return pulumi.get(self,", "Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExternalNonContainerDatabaseResult: \"\"\"", "Oracle Cloud Infrastructure Database service. 
Gets information about a specific", "pulumi.set(__self__, \"defined_tags\", defined_tags) if display_name and not isinstance(display_name, str): raise", "argument 'time_created' to be a str\") pulumi.set(__self__, \"time_created\", time_created) if", "pulumi import pulumi_oci as oci test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"]) ``` :param", "db_id=__ret__.db_id, db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags, display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id, freeform_tags=__ret__.freeform_tags, id=__ret__.id, lifecycle_details=__ret__.lifecycle_details, ncharacter_set=__ret__.ncharacter_set,", "argument 'database_configuration' to be a str\") pulumi.set(__self__, \"database_configuration\", database_configuration) if", "be a str\") pulumi.set(__self__, \"database_configuration\", database_configuration) if database_edition and not", "date and time the database was created. \"\"\" return pulumi.get(self,", "test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"]) ``` :param str external_non_container_database_id: The external non-container", "lifecycle_details=None, ncharacter_set=None, operations_insights_config=None, state=None, time_created=None, time_zone=None): if character_set and not", "TypeError(\"Expected argument 'database_version' to be a str\") pulumi.set(__self__, \"database_version\", database_version)", "def time_zone(self) -> str: \"\"\" The time zone of the", "set of the external database. 
\"\"\" return pulumi.get(self, \"character_set\") @property", "time_zone(self) -> str: \"\"\" The time zone of the external", "# pylint: disable=using-constant-test def __await__(self): if False: yield self return", "'db_unique_name' to be a str\") pulumi.set(__self__, \"db_unique_name\", db_unique_name) if defined_tags", "'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if database_configuration", "database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config, database_version=__ret__.database_version, db_id=__ret__.db_id, db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags, display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id, freeform_tags=__ret__.freeform_tags,", "TypeError(\"Expected argument 'ncharacter_set' to be a str\") pulumi.set(__self__, \"ncharacter_set\", ncharacter_set)", "database_edition(self) -> str: \"\"\" The Oracle Database edition. 
\"\"\" return", "return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\"", "freeform_tags=self.freeform_tags, id=self.id, lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config, state=self.state, time_created=self.time_created, time_zone=self.time_zone) def get_external_non_container_database(external_non_container_database_id:", "str\") pulumi.set(__self__, \"database_configuration\", database_configuration) if database_edition and not isinstance(database_edition, str):", "return pulumi.get(self, \"db_id\") @property @pulumi.getter(name=\"dbPacks\") def db_packs(self) -> str: \"\"\"", "warnings import pulumi import pulumi.runtime from typing import Any, Mapping,", "external_non_container_database_id if opts is None: opts = pulumi.InvokeOptions() if opts.version", "str\") pulumi.set(__self__, \"lifecycle_details\", lifecycle_details) if ncharacter_set and not isinstance(ncharacter_set, str):", "the external database. It is a time zone offset (a", "database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': \"\"\" The configuration of the Database Management", "-> str: \"\"\" The Oracle Database edition. \"\"\" return pulumi.get(self,", "The `DB_UNIQUE_NAME` of the external database. 
\"\"\" return pulumi.get(self, \"db_unique_name\")", "'operations_insights_config' to be a dict\") pulumi.set(__self__, \"operations_insights_config\", operations_insights_config) if state", "not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to be a", "database_edition) if database_management_config and not isinstance(database_management_config, dict): raise TypeError(\"Expected argument", "not isinstance(db_unique_name, str): raise TypeError(\"Expected argument 'db_unique_name' to be a", "raise TypeError(\"Expected argument 'lifecycle_details' to be a str\") pulumi.set(__self__, \"lifecycle_details\",", "database_edition and not isinstance(database_edition, str): raise TypeError(\"Expected argument 'database_edition' to", "# coding=utf-8 # *** WARNING: this file was generated by", "external database. \"\"\" return pulumi.get(self, \"db_unique_name\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self)", "@property @pulumi.getter(name=\"ncharacterSet\") def ncharacter_set(self) -> str: \"\"\" The national character", "depending on how the time zone value was specified when", "import warnings import pulumi import pulumi.runtime from typing import Any,", "str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment. 
\"\"\" return pulumi.get(self,", "pulumi.get(self, \"lifecycle_details\") @property @pulumi.getter(name=\"ncharacterSet\") def ncharacter_set(self) -> str: \"\"\" The", "@property @pulumi.getter(name=\"characterSet\") def character_set(self) -> str: \"\"\" The character set", "pulumi.get(self, \"db_packs\") @property @pulumi.getter(name=\"dbUniqueName\") def db_unique_name(self) -> str: \"\"\" The", "isinstance(database_configuration, str): raise TypeError(\"Expected argument 'database_configuration' to be a str\")", "display_name=None, external_non_container_database_id=None, freeform_tags=None, id=None, lifecycle_details=None, ncharacter_set=None, operations_insights_config=None, state=None, time_created=None, time_zone=None):", "@property @pulumi.getter(name=\"displayName\") def display_name(self) -> str: \"\"\" The user-friendly name", "@pulumi.getter def state(self) -> str: \"\"\" The current state of", "*** import warnings import pulumi import pulumi.runtime from typing import", "compartment. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"databaseConfiguration\") def database_configuration(self) ->", "dict): raise TypeError(\"Expected argument 'freeform_tags' to be a dict\") pulumi.set(__self__,", "[OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). \"\"\" __args__ = dict() __args__['externalNonContainerDatabaseId'] = external_non_container_database_id if opts", "database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). 
\"\"\" __args__ = dict() __args__['externalNonContainerDatabaseId'] = external_non_container_database_id if", "AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set, compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config, database_version=__ret__.database_version, db_id=__ret__.db_id, db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name,", "str): raise TypeError(\"Expected argument 'state' to be a str\") pulumi.set(__self__,", "you know what you are doing! *** import warnings import", "a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if display_name and not isinstance(display_name,", "\"operations_insights_config\", operations_insights_config) if state and not isinstance(state, str): raise TypeError(\"Expected", "freeform_tags=None, id=None, lifecycle_details=None, ncharacter_set=None, operations_insights_config=None, state=None, time_created=None, time_zone=None): if character_set", "dict): raise TypeError(\"Expected argument 'database_management_config' to be a dict\") pulumi.set(__self__,", "-> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': \"\"\" The configuration of the Database Management service.", "pulumi.set(__self__, \"compartment_id\", compartment_id) if database_configuration and not isinstance(database_configuration, str): raise", "and not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to be", "not isinstance(display_name, str): raise TypeError(\"Expected argument 'display_name' to be a", "dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if id and not isinstance(id, str):", "located outside of Oracle Cloud. 
\"\"\" return pulumi.get(self, \"db_id\") @property", "-> str: \"\"\" The current state of the Oracle Cloud", "'id' to be a str\") pulumi.set(__self__, \"id\", id) if lifecycle_details", "opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__,", "does not have to be unique. \"\"\" return pulumi.get(self, \"display_name\")", "argument 'id' to be a str\") pulumi.set(__self__, \"id\", id) if", "a str\") pulumi.set(__self__, \"db_id\", db_id) if db_packs and not isinstance(db_packs,", "The character set of the external database. \"\"\" return pulumi.get(self,", "operations_insights_config) if state and not isinstance(state, str): raise TypeError(\"Expected argument", "pylint: disable=using-constant-test def __await__(self): if False: yield self return GetExternalNonContainerDatabaseResult(", "str\") pulumi.set(__self__, \"db_id\", db_id) if db_packs and not isinstance(db_packs, str):", "def external_non_container_database_id(self) -> str: return pulumi.get(self, \"external_non_container_database_id\") @property @pulumi.getter(name=\"freeformTags\") def", "-> str: \"\"\" The `DB_UNIQUE_NAME` of the external database. \"\"\"", "\"\"\" The user-friendly name for the external database. The name", "name for the external database. The name does not have", "database was created. \"\"\" return pulumi.get(self, \"time_created\") @property @pulumi.getter(name=\"timeZone\") def", "\"compartment_id\") @property @pulumi.getter(name=\"databaseConfiguration\") def database_configuration(self) -> str: \"\"\" The Oracle", "simple key-value pair with no predefined name, type, or namespace.", "__args__['externalNonContainerDatabaseId'] = external_non_container_database_id if opts is None: opts = pulumi.InvokeOptions()", "Oracle Cloud Infrastructure external database resource. \"\"\" return pulumi.get(self, \"id\")", "this resource. 
Each key is predefined and scoped to a", "raise TypeError(\"Expected argument 'database_version' to be a str\") pulumi.set(__self__, \"database_version\",", "time zone of the external database. It is a time", "a str\") pulumi.set(__self__, \"state\", state) if time_created and not isinstance(time_created,", "str\") pulumi.set(__self__, \"time_zone\", time_zone) @property @pulumi.getter(name=\"characterSet\") def character_set(self) -> str:", "to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if database_configuration and", "the external database. \"\"\" return pulumi.get(self, \"db_unique_name\") @property @pulumi.getter(name=\"definedTags\") def", "def id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle", "raise TypeError(\"Expected argument 'database_management_config' to be a dict\") pulumi.set(__self__, \"database_management_config\",", "Database service. Gets information about a specific external non-container database.", "external database resource. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"lifecycleDetails\") def", "\"\"\" The date and time the database was created. \"\"\"", "resource. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"lifecycleDetails\") def lifecycle_details(self) ->", "an Oracle Database located outside of Oracle Cloud. \"\"\" return", "\"character_set\", character_set) if compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected", "return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"lifecycleDetails\") def lifecycle_details(self) -> str: \"\"\"", "see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\")", "external database. 
\"\"\" return pulumi.get(self, \"character_set\") @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self)", "of the external database. \"\"\" return pulumi.get(self, \"ncharacter_set\") @property @pulumi.getter(name=\"operationsInsightsConfig\")", "str): raise TypeError(\"Expected argument 'character_set' to be a str\") pulumi.set(__self__,", "Database located outside of Oracle Cloud. \"\"\" return pulumi.get(self, \"db_id\")", "db_id) if db_packs and not isinstance(db_packs, str): raise TypeError(\"Expected argument", "'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': \"\"\" The configuration of Operations Insights for the external", "a time zone region name, depending on how the time", "isinstance(database_version, str): raise TypeError(\"Expected argument 'database_version' to be a str\")", "to be a str\") pulumi.set(__self__, \"character_set\", character_set) if compartment_id and", "a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). \"\"\" return", "a dict\") pulumi.set(__self__, \"database_management_config\", database_management_config) if database_version and not isinstance(database_version,", "\"\"\" def __init__(__self__, character_set=None, compartment_id=None, database_configuration=None, database_edition=None, database_management_config=None, database_version=None, db_id=None,", "Infrastructure external database resource. 
\"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\")", "Insights for the external database \"\"\" return pulumi.get(self, \"operations_insights_config\") @property", "@pulumi.getter def id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the", "not isinstance(character_set, str): raise TypeError(\"Expected argument 'character_set' to be a", "be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if display_name and not", "be a str\") pulumi.set(__self__, \"display_name\", display_name) if external_non_container_database_id and not", "and not isinstance(db_unique_name, str): raise TypeError(\"Expected argument 'db_unique_name' to be", "'lifecycle_details' to be a str\") pulumi.set(__self__, \"lifecycle_details\", lifecycle_details) if ncharacter_set", "this file was generated by the Pulumi Terraform Bridge (tfgen)", "identifies an Oracle Database located outside of Oracle Cloud. \"\"\"", "user-friendly name for the external database. The name does not", "The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment. 
\"\"\" return pulumi.get(self, \"compartment_id\") @property", "a str\") pulumi.set(__self__, \"character_set\", character_set) if compartment_id and not isinstance(compartment_id,", "state and not isinstance(state, str): raise TypeError(\"Expected argument 'state' to", "*** # *** Do not edit by hand unless you're", "compartment_id) if database_configuration and not isinstance(database_configuration, str): raise TypeError(\"Expected argument", "id) if lifecycle_details and not isinstance(lifecycle_details, str): raise TypeError(\"Expected argument", "\"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter def id(self) -> str:", "\"db_unique_name\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined", "*** Do not edit by hand unless you're certain you", "@property @pulumi.getter def id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of", "edition. \"\"\" return pulumi.get(self, \"database_edition\") @property @pulumi.getter(name=\"databaseManagementConfig\") def database_management_config(self) ->", "\"defined_tags\", defined_tags) if display_name and not isinstance(display_name, str): raise TypeError(\"Expected", "TypeError(\"Expected argument 'operations_insights_config' to be a dict\") pulumi.set(__self__, \"operations_insights_config\", operations_insights_config)", "overload from .. import _utilities from . import outputs __all__", "more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). \"\"\" return pulumi.get(self, \"defined_tags\") @property", "is a simple key-value pair with no predefined name, type,", "character type in the format '[+|-]TZH:TZM') or a time zone", "Oracle Database version. \"\"\" return pulumi.get(self, \"database_version\") @property @pulumi.getter(name=\"dbId\") def", "Each key is predefined and scoped to a namespace. 
For", "opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value return", "about a specific External Non Container Database resource in Oracle", "pulumi.set(__self__, \"lifecycle_details\", lifecycle_details) if ncharacter_set and not isinstance(ncharacter_set, str): raise", "pulumi.set(__self__, \"id\", id) if lifecycle_details and not isinstance(lifecycle_details, str): raise", "\"database_management_config\", database_management_config) if database_version and not isinstance(database_version, str): raise TypeError(\"Expected", "pulumi.get(self, \"database_configuration\") @property @pulumi.getter(name=\"databaseEdition\") def database_edition(self) -> str: \"\"\" The", "from . import outputs __all__ = [ 'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database',", "raise TypeError(\"Expected argument 'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\",", "str\") pulumi.set(__self__, \"state\", state) if time_created and not isinstance(time_created, str):", "if time_zone and not isinstance(time_zone, str): raise TypeError(\"Expected argument 'time_zone'", "str): raise TypeError(\"Expected argument 'display_name' to be a str\") pulumi.set(__self__,", "Database resource in Oracle Cloud Infrastructure Database service. Gets information", "not isinstance(database_edition, str): raise TypeError(\"Expected argument 'database_edition' to be a", "\"\"\" return pulumi.get(self, \"db_unique_name\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str,", "Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). \"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter(name=\"displayName\") def display_name(self) ->", ".. import _utilities from . 
import outputs __all__ = [", "= pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__", "'database_management_config' to be a dict\") pulumi.set(__self__, \"database_management_config\", database_management_config) if database_version", "raise TypeError(\"Expected argument 'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\",", "str): raise TypeError(\"Expected argument 'database_version' to be a str\") pulumi.set(__self__,", "``` :param str external_non_container_database_id: The external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). \"\"\"", "\"\"\" Free-form tags for this resource. Each tag is a", "opts=opts, typ=GetExternalNonContainerDatabaseResult).value return AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set, compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config, database_version=__ret__.database_version,", "ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config, state=self.state, time_created=self.time_created, time_zone=self.time_zone) def get_external_non_container_database(external_non_container_database_id: Optional[str] = None,", "a str\") pulumi.set(__self__, \"db_packs\", db_packs) if db_unique_name and not isinstance(db_unique_name,", "and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to be", "no predefined name, type, or namespace. For more information, see", "The external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). 
\"\"\" __args__ = dict() __args__['externalNonContainerDatabaseId']", "argument 'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if", "str: \"\"\" The character set of the external database. \"\"\"", "db_id=self.db_id, db_packs=self.db_packs, db_unique_name=self.db_unique_name, defined_tags=self.defined_tags, display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags, id=self.id, lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set,", "state. \"\"\" return pulumi.get(self, \"lifecycle_details\") @property @pulumi.getter(name=\"ncharacterSet\") def ncharacter_set(self) ->", "external_non_container_database_id) if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument", "= [ 'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database', ] @pulumi.output_type class GetExternalNonContainerDatabaseResult: \"\"\"", "be a str\") pulumi.set(__self__, \"time_zone\", time_zone) @property @pulumi.getter(name=\"characterSet\") def character_set(self)", "\"\"\" return pulumi.get(self, \"time_created\") @property @pulumi.getter(name=\"timeZone\") def time_zone(self) -> str:", "doing! *** import warnings import pulumi import pulumi.runtime from typing", "about the current lifecycle state. 
\"\"\" return pulumi.get(self, \"lifecycle_details\") @property", "## Example Usage ```python import pulumi import pulumi_oci as oci", "if database_management_config and not isinstance(database_management_config, dict): raise TypeError(\"Expected argument 'database_management_config'", "'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database', ] @pulumi.output_type class GetExternalNonContainerDatabaseResult: \"\"\" A collection", "if external_non_container_database_id and not isinstance(external_non_container_database_id, str): raise TypeError(\"Expected argument 'external_non_container_database_id'", "Gets information about a specific external non-container database. ## Example", "str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if database_configuration and not isinstance(database_configuration, str):", "database_version(self) -> str: \"\"\" The Oracle Database version. \"\"\" return", "-> str: \"\"\" The Oracle Database ID, which identifies an", "information about the current lifecycle state. \"\"\" return pulumi.get(self, \"lifecycle_details\")", "display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags, id=self.id, lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config, state=self.state, time_created=self.time_created, time_zone=self.time_zone)", "\"\"\" __args__ = dict() __args__['externalNonContainerDatabaseId'] = external_non_container_database_id if opts is", "compartment_id=None, database_configuration=None, database_edition=None, database_management_config=None, database_version=None, db_id=None, db_packs=None, db_unique_name=None, defined_tags=None, display_name=None,", "the current lifecycle state. 
\"\"\" return pulumi.get(self, \"lifecycle_details\") @property @pulumi.getter(name=\"ncharacterSet\")", "if character_set and not isinstance(character_set, str): raise TypeError(\"Expected argument 'character_set'", "format '[+|-]TZH:TZM') or a time zone region name, depending on", "tags for this resource. Each tag is a simple key-value", "not isinstance(state, str): raise TypeError(\"Expected argument 'state' to be a", "database. ## Example Usage ```python import pulumi import pulumi_oci as", "Free-form tags for this resource. Each tag is a simple", "if id and not isinstance(id, str): raise TypeError(\"Expected argument 'id'", "typing import Any, Mapping, Optional, Sequence, Union, overload from ..", "time_zone=None): if character_set and not isinstance(character_set, str): raise TypeError(\"Expected argument", "database_management_config) if database_version and not isinstance(database_version, str): raise TypeError(\"Expected argument", "str\") pulumi.set(__self__, \"id\", id) if lifecycle_details and not isinstance(lifecycle_details, str):", "database_version=self.database_version, db_id=self.db_id, db_packs=self.db_packs, db_unique_name=self.db_unique_name, defined_tags=self.defined_tags, display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags, id=self.id, lifecycle_details=self.lifecycle_details,", "raise TypeError(\"Expected argument 'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\",", "to be a str\") pulumi.set(__self__, \"database_configuration\", database_configuration) if database_edition and", "time_created(self) -> str: \"\"\" The date and time the database", "db_unique_name=None, defined_tags=None, display_name=None, external_non_container_database_id=None, freeform_tags=None, id=None, lifecycle_details=None, ncharacter_set=None, operations_insights_config=None, state=None,", "@pulumi.getter(name=\"ncharacterSet\") def ncharacter_set(self) -> 
str: \"\"\" The national character of", "and not isinstance(db_packs, str): raise TypeError(\"Expected argument 'db_packs' to be", "database_configuration(self) -> str: \"\"\" The Oracle Database configuration \"\"\" return", "TypeError(\"Expected argument 'character_set' to be a str\") pulumi.set(__self__, \"character_set\", character_set)", "be a str\") pulumi.set(__self__, \"database_edition\", database_edition) if database_management_config and not", "pulumi.set(__self__, \"state\", state) if time_created and not isinstance(time_created, str): raise", "key-value pair with no predefined name, type, or namespace. For", "-> str: \"\"\" The date and time the database was", "if database_configuration and not isinstance(database_configuration, str): raise TypeError(\"Expected argument 'database_configuration'", "Mapping[str, Any]: \"\"\" Defined tags for this resource. Each key", "<reponame>EladGabay/pulumi-oci # coding=utf-8 # *** WARNING: this file was generated", "database_management_config=__ret__.database_management_config, database_version=__ret__.database_version, db_id=__ret__.db_id, db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags, display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id, freeform_tags=__ret__.freeform_tags, id=__ret__.id,", "argument 'database_management_config' to be a dict\") pulumi.set(__self__, \"database_management_config\", database_management_config) if", "@property @pulumi.getter(name=\"externalNonContainerDatabaseId\") def external_non_container_database_id(self) -> str: return pulumi.get(self, \"external_non_container_database_id\") @property", "/ last altered. 
\"\"\" return pulumi.get(self, \"time_zone\") class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): #", "and not isinstance(database_management_config, dict): raise TypeError(\"Expected argument 'database_management_config' to be", "external non-container database. ## Example Usage ```python import pulumi import", "collection of values returned by getExternalNonContainerDatabase. \"\"\" def __init__(__self__, character_set=None,", "to be a str\") pulumi.set(__self__, \"id\", id) if lifecycle_details and", "database_configuration) if database_edition and not isinstance(database_edition, str): raise TypeError(\"Expected argument", "pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload", "_utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value return AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set,", "str): raise TypeError(\"Expected argument 'db_packs' to be a str\") pulumi.set(__self__,", "'state' to be a str\") pulumi.set(__self__, \"state\", state) if time_created", "'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': \"\"\" The configuration of the Database Management service. \"\"\"", "Oracle Database located outside of Oracle Cloud. \"\"\" return pulumi.get(self,", "Any]: \"\"\" Free-form tags for this resource. Each tag is", "db_id and not isinstance(db_id, str): raise TypeError(\"Expected argument 'db_id' to", "or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
Example: `{\"Department\":", "database \"\"\" return pulumi.get(self, \"operations_insights_config\") @property @pulumi.getter def state(self) ->", "lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config, state=self.state, time_created=self.time_created, time_zone=self.time_zone) def get_external_non_container_database(external_non_container_database_id: Optional[str] =", "dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if display_name and not isinstance(display_name, str):", "a str\") pulumi.set(__self__, \"database_version\", database_version) if db_id and not isinstance(db_id,", "None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value", "TypeError(\"Expected argument 'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags)", "str): raise TypeError(\"Expected argument 'time_zone' to be a str\") pulumi.set(__self__,", "a str\") pulumi.set(__self__, \"lifecycle_details\", lifecycle_details) if ncharacter_set and not isinstance(ncharacter_set,", "return pulumi.get(self, \"database_configuration\") @property @pulumi.getter(name=\"databaseEdition\") def database_edition(self) -> str: \"\"\"", "opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExternalNonContainerDatabaseResult: \"\"\" This data source", "isinstance(database_management_config, dict): raise TypeError(\"Expected argument 'database_management_config' to be a dict\")", "external Oracle Database. 
\"\"\" return pulumi.get(self, \"db_packs\") @property @pulumi.getter(name=\"dbUniqueName\") def", "-> str: \"\"\" The user-friendly name for the external database.", "to be a str\") pulumi.set(__self__, \"db_id\", db_id) if db_packs and", "isinstance(lifecycle_details, str): raise TypeError(\"Expected argument 'lifecycle_details' to be a str\")", "more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\" return", "\"compartment_id\", compartment_id) if database_configuration and not isinstance(database_configuration, str): raise TypeError(\"Expected", "to be a dict\") pulumi.set(__self__, \"database_management_config\", database_management_config) if database_version and", "Oracle Database configuration \"\"\" return pulumi.get(self, \"database_configuration\") @property @pulumi.getter(name=\"databaseEdition\") def", "for this resource. Each tag is a simple key-value pair", "-> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment. \"\"\" return", "str\") pulumi.set(__self__, \"db_unique_name\", db_unique_name) if defined_tags and not isinstance(defined_tags, dict):", "a str\") pulumi.set(__self__, \"database_configuration\", database_configuration) if database_edition and not isinstance(database_edition,", "isinstance(time_zone, str): raise TypeError(\"Expected argument 'time_zone' to be a str\")", "and not isinstance(external_non_container_database_id, str): raise TypeError(\"Expected argument 'external_non_container_database_id' to be", "isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to be a dict\")", "zone offset (a character type in the format '[+|-]TZH:TZM') or", "\"\"\" The Oracle Database configuration \"\"\" return pulumi.get(self, \"database_configuration\") @property", "Union, overload from .. import _utilities from . 
import outputs", "\"ncharacter_set\", ncharacter_set) if operations_insights_config and not isinstance(operations_insights_config, dict): raise TypeError(\"Expected", "database_management_config and not isinstance(database_management_config, dict): raise TypeError(\"Expected argument 'database_management_config' to", "time_created) if time_zone and not isinstance(time_zone, str): raise TypeError(\"Expected argument", "Database ID, which identifies an Oracle Database located outside of", "str: \"\"\" The `DB_UNIQUE_NAME` of the external database. \"\"\" return", "argument 'database_version' to be a str\") pulumi.set(__self__, \"database_version\", database_version) if", "if state and not isinstance(state, str): raise TypeError(\"Expected argument 'state'", "of the external database. \"\"\" return pulumi.get(self, \"db_unique_name\") @property @pulumi.getter(name=\"definedTags\")", "\"id\") @property @pulumi.getter(name=\"lifecycleDetails\") def lifecycle_details(self) -> str: \"\"\" Additional information", "\"external_non_container_database_id\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form", "def __await__(self): if False: yield self return GetExternalNonContainerDatabaseResult( character_set=self.character_set, compartment_id=self.compartment_id,", "db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags, display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id, freeform_tags=__ret__.freeform_tags, id=__ret__.id, lifecycle_details=__ret__.lifecycle_details, ncharacter_set=__ret__.ncharacter_set, operations_insights_config=__ret__.operations_insights_config,", "from typing import Any, Mapping, Optional, Sequence, Union, overload from", "str): raise TypeError(\"Expected argument 'time_created' to be a str\") pulumi.set(__self__,", "pulumi.get(self, \"db_unique_name\") @property 
@pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\"", "@pulumi.getter(name=\"databaseVersion\") def database_version(self) -> str: \"\"\" The Oracle Database version.", "if lifecycle_details and not isinstance(lifecycle_details, str): raise TypeError(\"Expected argument 'lifecycle_details'", "created. \"\"\" return pulumi.get(self, \"time_created\") @property @pulumi.getter(name=\"timeZone\") def time_zone(self) ->", "TypeError(\"Expected argument 'db_id' to be a str\") pulumi.set(__self__, \"db_id\", db_id)", "\"database_configuration\") @property @pulumi.getter(name=\"databaseEdition\") def database_edition(self) -> str: \"\"\" The Oracle", "know what you are doing! *** import warnings import pulumi", "argument 'state' to be a str\") pulumi.set(__self__, \"state\", state) if", "\"database_version\", database_version) if db_id and not isinstance(db_id, str): raise TypeError(\"Expected", "and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to be", "The current state of the Oracle Cloud Infrastructure external database", "Cloud Infrastructure external database resource. \"\"\" return pulumi.get(self, \"state\") @property", "Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities", "Additional information about the current lifecycle state. \"\"\" return pulumi.get(self,", "certain you know what you are doing! *** import warnings", "raise TypeError(\"Expected argument 'db_unique_name' to be a str\") pulumi.set(__self__, \"db_unique_name\",", "specified when the database was created / last altered. \"\"\"", "\"\"\" return pulumi.get(self, \"lifecycle_details\") @property @pulumi.getter(name=\"ncharacterSet\") def ncharacter_set(self) -> str:", "raise TypeError(\"Expected argument 'ncharacter_set' to be a str\") pulumi.set(__self__, \"ncharacter_set\",", "Database version. 
\"\"\" return pulumi.get(self, \"database_version\") @property @pulumi.getter(name=\"dbId\") def db_id(self)", "pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value return AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set, compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition,", "argument 'database_edition' to be a str\") pulumi.set(__self__, \"database_edition\", database_edition) if", "not isinstance(operations_insights_config, dict): raise TypeError(\"Expected argument 'operations_insights_config' to be a", "Terraform Bridge (tfgen) Tool. *** # *** Do not edit", "import outputs __all__ = [ 'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database', ] @pulumi.output_type", "be a str\") pulumi.set(__self__, \"time_created\", time_created) if time_zone and not", "@property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of", "external database resource. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def", "return pulumi.get(self, \"operations_insights_config\") @property @pulumi.getter def state(self) -> str: \"\"\"", "dict\") pulumi.set(__self__, \"operations_insights_config\", operations_insights_config) if state and not isinstance(state, str):", "def db_id(self) -> str: \"\"\" The Oracle Database ID, which", "The name does not have to be unique. \"\"\" return", "time zone region name, depending on how the time zone", "see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
\"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter(name=\"displayName\") def", "you're certain you know what you are doing! *** import", "__args__ = dict() __args__['externalNonContainerDatabaseId'] = external_non_container_database_id if opts is None:", "coding=utf-8 # *** WARNING: this file was generated by the", "isinstance(db_unique_name, str): raise TypeError(\"Expected argument 'db_unique_name' to be a str\")", "Cloud Infrastructure Database service. Gets information about a specific external", "\"\"\" The Oracle Database ID, which identifies an Oracle Database", "(tfgen) Tool. *** # *** Do not edit by hand", "isinstance(database_edition, str): raise TypeError(\"Expected argument 'database_edition' to be a str\")", "Infrastructure external database resource. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"lifecycleDetails\")", "be a str\") pulumi.set(__self__, \"db_unique_name\", db_unique_name) if defined_tags and not", "TypeError(\"Expected argument 'display_name' to be a str\") pulumi.set(__self__, \"display_name\", display_name)", "\"time_zone\") class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): # pylint: disable=using-constant-test def __await__(self): if False:", "the format '[+|-]TZH:TZM') or a time zone region name, depending", "when the database was created / last altered. 
\"\"\" return", "a str\") pulumi.set(__self__, \"db_unique_name\", db_unique_name) if defined_tags and not isinstance(defined_tags,", "__ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value return AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set, compartment_id=__ret__.compartment_id,", "__init__(__self__, character_set=None, compartment_id=None, database_configuration=None, database_edition=None, database_management_config=None, database_version=None, db_id=None, db_packs=None, db_unique_name=None,", "freeform_tags) if id and not isinstance(id, str): raise TypeError(\"Expected argument", "\"database_edition\") @property @pulumi.getter(name=\"databaseManagementConfig\") def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': \"\"\" The configuration", "the external Oracle Database. \"\"\" return pulumi.get(self, \"db_packs\") @property @pulumi.getter(name=\"dbUniqueName\")", "\"\"\" Defined tags for this resource. Each key is predefined", "@property @pulumi.getter(name=\"databaseConfiguration\") def database_configuration(self) -> str: \"\"\" The Oracle Database", "@pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags for", "db_id=None, db_packs=None, db_unique_name=None, defined_tags=None, display_name=None, external_non_container_database_id=None, freeform_tags=None, id=None, lifecycle_details=None, ncharacter_set=None,", "str external_non_container_database_id: The external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). \"\"\" __args__ =", "raise TypeError(\"Expected argument 'operations_insights_config' to be a dict\") pulumi.set(__self__, \"operations_insights_config\",", "database was created / last altered. 
\"\"\" return pulumi.get(self, \"time_zone\")", "str: \"\"\" The Oracle Database edition. \"\"\" return pulumi.get(self, \"database_edition\")", "[Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property", "import pulumi import pulumi_oci as oci test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"]) ```", "@property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags", "to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if id and", "argument 'db_unique_name' to be a str\") pulumi.set(__self__, \"db_unique_name\", db_unique_name) if", "return pulumi.get(self, \"database_version\") @property @pulumi.getter(name=\"dbId\") def db_id(self) -> str: \"\"\"", "defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags for this resource.", "if operations_insights_config and not isinstance(operations_insights_config, dict): raise TypeError(\"Expected argument 'operations_insights_config'", "@pulumi.output_type class GetExternalNonContainerDatabaseResult: \"\"\" A collection of values returned by", "and not isinstance(state, str): raise TypeError(\"Expected argument 'state' to be", "\"\"\" The configuration of Operations Insights for the external database", "str\") pulumi.set(__self__, \"database_edition\", database_edition) if database_management_config and not isinstance(database_management_config, dict):", "not edit by hand unless you're certain you know what", "pulumi.get(self, \"database_edition\") @property @pulumi.getter(name=\"databaseManagementConfig\") def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': \"\"\" The", "argument 
'db_packs' to be a str\") pulumi.set(__self__, \"db_packs\", db_packs) if", "not have to be unique. \"\"\" return pulumi.get(self, \"display_name\") @property", "operations_insights_config=None, state=None, time_created=None, time_zone=None): if character_set and not isinstance(character_set, str):", "database resource. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self)", "be a str\") pulumi.set(__self__, \"database_version\", database_version) if db_id and not", "zone of the external database. It is a time zone", "and not isinstance(id, str): raise TypeError(\"Expected argument 'id' to be", "str: \"\"\" The Oracle Database configuration \"\"\" return pulumi.get(self, \"database_configuration\")", "name does not have to be unique. \"\"\" return pulumi.get(self,", "-> str: \"\"\" The Oracle Database version. \"\"\" return pulumi.get(self,", "Mapping[str, Any]: \"\"\" Free-form tags for this resource. Each tag", "display_name) if external_non_container_database_id and not isinstance(external_non_container_database_id, str): raise TypeError(\"Expected argument", "# *** WARNING: this file was generated by the Pulumi", "\"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter(name=\"displayName\") def display_name(self) -> str:", "and not isinstance(database_configuration, str): raise TypeError(\"Expected argument 'database_configuration' to be", "\"database_edition\", database_edition) if database_management_config and not isinstance(database_management_config, dict): raise TypeError(\"Expected", "@property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" The date and", "isinstance(db_packs, str): raise TypeError(\"Expected argument 'db_packs' to be a str\")", "to be a str\") pulumi.set(__self__, \"database_edition\", database_edition) if database_management_config and", "-> str: \"\"\" The character set of the external database.", "@property 
@pulumi.getter(name=\"dbId\") def db_id(self) -> str: \"\"\" The Oracle Database", "db_unique_name=self.db_unique_name, defined_tags=self.defined_tags, display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags, id=self.id, lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config, state=self.state,", "For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\"", "None) -> AwaitableGetExternalNonContainerDatabaseResult: \"\"\" This data source provides details about", "information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). \"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter(name=\"displayName\")", "'time_created' to be a str\") pulumi.set(__self__, \"time_created\", time_created) if time_zone", "def state(self) -> str: \"\"\" The current state of the", "None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExternalNonContainerDatabaseResult: \"\"\" This data", "__all__ = [ 'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database', ] @pulumi.output_type class GetExternalNonContainerDatabaseResult:", "str): raise TypeError(\"Expected argument 'lifecycle_details' to be a str\") pulumi.set(__self__,", "database_version=None, db_id=None, db_packs=None, db_unique_name=None, defined_tags=None, display_name=None, external_non_container_database_id=None, freeform_tags=None, id=None, lifecycle_details=None,", "pulumi.get(self, \"defined_tags\") @property @pulumi.getter(name=\"displayName\") def display_name(self) -> str: \"\"\" The", "of Operations Insights for the external database \"\"\" return pulumi.get(self,", "pulumi.set(__self__, 
\"ncharacter_set\", ncharacter_set) if operations_insights_config and not isinstance(operations_insights_config, dict): raise", "by hand unless you're certain you know what you are", "time_created=self.time_created, time_zone=self.time_zone) def get_external_non_container_database(external_non_container_database_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] =", "str: \"\"\" Additional information about the current lifecycle state. \"\"\"", "database_edition=None, database_management_config=None, database_version=None, db_id=None, db_packs=None, db_unique_name=None, defined_tags=None, display_name=None, external_non_container_database_id=None, freeform_tags=None,", "@pulumi.getter(name=\"characterSet\") def character_set(self) -> str: \"\"\" The character set of", "@property @pulumi.getter(name=\"databaseManagementConfig\") def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': \"\"\" The configuration of", "if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase',", "argument 'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if", "current state of the Oracle Cloud Infrastructure external database resource.", "str\") pulumi.set(__self__, \"character_set\", character_set) if compartment_id and not isinstance(compartment_id, str):", "TypeError(\"Expected argument 'lifecycle_details' to be a str\") pulumi.set(__self__, \"lifecycle_details\", lifecycle_details)", "pulumi.set(__self__, \"external_non_container_database_id\", external_non_container_database_id) if freeform_tags and not isinstance(freeform_tags, dict): raise", "operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': \"\"\" The configuration of Operations Insights for", "be a str\") pulumi.set(__self__, \"lifecycle_details\", 
lifecycle_details) if ncharacter_set and not", "return pulumi.get(self, \"time_zone\") class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): # pylint: disable=using-constant-test def __await__(self):", "information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self,", "and not isinstance(database_edition, str): raise TypeError(\"Expected argument 'database_edition' to be", "not isinstance(database_management_config, dict): raise TypeError(\"Expected argument 'database_management_config' to be a", "predefined and scoped to a namespace. For more information, see", "you are doing! *** import warnings import pulumi import pulumi.runtime", "and scoped to a namespace. For more information, see [Resource", "return pulumi.get(self, \"lifecycle_details\") @property @pulumi.getter(name=\"ncharacterSet\") def ncharacter_set(self) -> str: \"\"\"", "character_set) if compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected argument", "the external database \"\"\" return pulumi.get(self, \"operations_insights_config\") @property @pulumi.getter def", "pulumi.get(self, \"time_created\") @property @pulumi.getter(name=\"timeZone\") def time_zone(self) -> str: \"\"\" The", "GetExternalNonContainerDatabaseResult: \"\"\" A collection of values returned by getExternalNonContainerDatabase. 
\"\"\"", "to be a str\") pulumi.set(__self__, \"database_version\", database_version) if db_id and", "@property @pulumi.getter(name=\"operationsInsightsConfig\") def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': \"\"\" The configuration of", "pulumi.set(__self__, \"db_unique_name\", db_unique_name) if defined_tags and not isinstance(defined_tags, dict): raise", "\"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter def id(self) ->", "pulumi.set(__self__, \"time_created\", time_created) if time_zone and not isinstance(time_zone, str): raise", "database_version=__ret__.database_version, db_id=__ret__.db_id, db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags, display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id, freeform_tags=__ret__.freeform_tags, id=__ret__.id, lifecycle_details=__ret__.lifecycle_details,", "TypeError(\"Expected argument 'id' to be a str\") pulumi.set(__self__, \"id\", id)", "altered. \"\"\" return pulumi.get(self, \"time_zone\") class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): # pylint: disable=using-constant-test", "argument 'ncharacter_set' to be a str\") pulumi.set(__self__, \"ncharacter_set\", ncharacter_set) if", "be a dict\") pulumi.set(__self__, \"database_management_config\", database_management_config) if database_version and not", "compartment_id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment. \"\"\"", "Bridge (tfgen) Tool. *** # *** Do not edit by", "def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags for this", "resource. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) ->", "pair with no predefined name, type, or namespace. 
For more", "or a time zone region name, depending on how the", "@property @pulumi.getter def state(self) -> str: \"\"\" The current state", "__await__(self): if False: yield self return GetExternalNonContainerDatabaseResult( character_set=self.character_set, compartment_id=self.compartment_id, database_configuration=self.database_configuration,", "a specific External Non Container Database resource in Oracle Cloud", "Example Usage ```python import pulumi import pulumi_oci as oci test_external_non_container_database", "return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"databaseConfiguration\") def database_configuration(self) -> str: \"\"\"", "\"time_created\") @property @pulumi.getter(name=\"timeZone\") def time_zone(self) -> str: \"\"\" The time", "= oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"]) ``` :param str external_non_container_database_id: The external non-container database", "returned by getExternalNonContainerDatabase. \"\"\" def __init__(__self__, character_set=None, compartment_id=None, database_configuration=None, database_edition=None,", "str): raise TypeError(\"Expected argument 'ncharacter_set' to be a str\") pulumi.set(__self__,", "WARNING: this file was generated by the Pulumi Terraform Bridge", "A collection of values returned by getExternalNonContainerDatabase. \"\"\" def __init__(__self__,", "database. 
It is a time zone offset (a character type", "str\") pulumi.set(__self__, \"database_version\", database_version) if db_id and not isinstance(db_id, str):", "\"\"\" return pulumi.get(self, \"time_zone\") class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): # pylint: disable=using-constant-test def", "time_zone=self.time_zone) def get_external_non_container_database(external_non_container_database_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None)", "database. \"\"\" return pulumi.get(self, \"ncharacter_set\") @property @pulumi.getter(name=\"operationsInsightsConfig\") def operations_insights_config(self) ->", "raise TypeError(\"Expected argument 'time_created' to be a str\") pulumi.set(__self__, \"time_created\",", "operations_insights_config and not isinstance(operations_insights_config, dict): raise TypeError(\"Expected argument 'operations_insights_config' to", "str): raise TypeError(\"Expected argument 'db_unique_name' to be a str\") pulumi.set(__self__,", "be a str\") pulumi.set(__self__, \"character_set\", character_set) if compartment_id and not", "*** WARNING: this file was generated by the Pulumi Terraform", "external database. The name does not have to be unique.", "ncharacter_set=None, operations_insights_config=None, state=None, time_created=None, time_zone=None): if character_set and not isinstance(character_set,", "state of the Oracle Cloud Infrastructure external database resource. \"\"\"", "\"\"\" The character set of the external database. \"\"\" return", "of values returned by getExternalNonContainerDatabase. \"\"\" def __init__(__self__, character_set=None, compartment_id=None,", "db_unique_name) if defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument", "Defined tags for this resource. 
Each key is predefined and", "\"\"\" return pulumi.get(self, \"db_id\") @property @pulumi.getter(name=\"dbPacks\") def db_packs(self) -> str:", "raise TypeError(\"Expected argument 'time_zone' to be a str\") pulumi.set(__self__, \"time_zone\",", "db_unique_name(self) -> str: \"\"\" The `DB_UNIQUE_NAME` of the external database.", "\"state\", state) if time_created and not isinstance(time_created, str): raise TypeError(\"Expected", "def compartment_id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment.", "for this resource. Each key is predefined and scoped to", "\"\"\" Additional information about the current lifecycle state. \"\"\" return", "the database was created. \"\"\" return pulumi.get(self, \"time_created\") @property @pulumi.getter(name=\"timeZone\")", "import pulumi_oci as oci test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"]) ``` :param str", "defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to", "value was specified when the database was created / last", "specific external non-container database. 
## Example Usage ```python import pulumi", "TypeError(\"Expected argument 'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags)", "pulumi.set(__self__, \"database_configuration\", database_configuration) if database_edition and not isinstance(database_edition, str): raise", "\"database_management_config\") @property @pulumi.getter(name=\"databaseVersion\") def database_version(self) -> str: \"\"\" The Oracle", "db_id(self) -> str: \"\"\" The Oracle Database ID, which identifies", "database_version and not isinstance(database_version, str): raise TypeError(\"Expected argument 'database_version' to", "The date and time the database was created. \"\"\" return", "str): raise TypeError(\"Expected argument 'id' to be a str\") pulumi.set(__self__,", "for the external database. The name does not have to", "service. Gets information about a specific external non-container database. ##", "ncharacter_set(self) -> str: \"\"\" The national character of the external", "\"id\", id) if lifecycle_details and not isinstance(lifecycle_details, str): raise TypeError(\"Expected", "database_configuration and not isinstance(database_configuration, str): raise TypeError(\"Expected argument 'database_configuration' to", "str: \"\"\" The Oracle Database version. \"\"\" return pulumi.get(self, \"database_version\")", "Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter def", "of the compartment. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"databaseConfiguration\") def", "Database configuration \"\"\" return pulumi.get(self, \"database_configuration\") @property @pulumi.getter(name=\"databaseEdition\") def database_edition(self)", "def database_version(self) -> str: \"\"\" The Oracle Database version. 
\"\"\"", "external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags, id=self.id, lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config, state=self.state, time_created=self.time_created, time_zone=self.time_zone) def", "Mapping, Optional, Sequence, Union, overload from .. import _utilities from", "non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). \"\"\" __args__ = dict() __args__['externalNonContainerDatabaseId'] = external_non_container_database_id", "import pulumi import pulumi.runtime from typing import Any, Mapping, Optional,", "\"db_packs\") @property @pulumi.getter(name=\"dbUniqueName\") def db_unique_name(self) -> str: \"\"\" The `DB_UNIQUE_NAME`", "Sequence, Union, overload from .. import _utilities from . import", "[ 'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database', ] @pulumi.output_type class GetExternalNonContainerDatabaseResult: \"\"\" A", "and not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to be", "Operations Insights for the external database \"\"\" return pulumi.get(self, \"operations_insights_config\")", "\"lifecycle_details\") @property @pulumi.getter(name=\"ncharacterSet\") def ncharacter_set(self) -> str: \"\"\" The national", "str): raise TypeError(\"Expected argument 'external_non_container_database_id' to be a str\") pulumi.set(__self__,", "\"\"\" return pulumi.get(self, \"display_name\") @property @pulumi.getter(name=\"externalNonContainerDatabaseId\") def external_non_container_database_id(self) -> str:", "not isinstance(database_configuration, str): raise TypeError(\"Expected argument 'database_configuration' to be a", "database packs licensed for the external Oracle Database. 
\"\"\" return", "= dict() __args__['externalNonContainerDatabaseId'] = external_non_container_database_id if opts is None: opts", "str): raise TypeError(\"Expected argument 'database_edition' to be a str\") pulumi.set(__self__,", "str): raise TypeError(\"Expected argument 'db_id' to be a str\") pulumi.set(__self__,", "not isinstance(db_packs, str): raise TypeError(\"Expected argument 'db_packs' to be a", "pulumi.get(self, \"freeform_tags\") @property @pulumi.getter def id(self) -> str: \"\"\" The", "db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags, display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id, freeform_tags=__ret__.freeform_tags, id=__ret__.id, lifecycle_details=__ret__.lifecycle_details, ncharacter_set=__ret__.ncharacter_set, operations_insights_config=__ret__.operations_insights_config, state=__ret__.state,", "str\") pulumi.set(__self__, \"time_created\", time_created) if time_zone and not isinstance(time_zone, str):", "def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': \"\"\" The configuration of Operations Insights", "if database_version and not isinstance(database_version, str): raise TypeError(\"Expected argument 'database_version'", "was created / last altered. \"\"\" return pulumi.get(self, \"time_zone\") class", "\"\"\" The configuration of the Database Management service. \"\"\" return", "Oracle Database ID, which identifies an Oracle Database located outside", "tags for this resource. Each key is predefined and scoped", "Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter", "yield self return GetExternalNonContainerDatabaseResult( character_set=self.character_set, compartment_id=self.compartment_id, database_configuration=self.database_configuration, database_edition=self.database_edition, database_management_config=self.database_management_config, database_version=self.database_version,", "operations_insights_config=self.operations_insights_config, state=self.state, time_created=self.time_created, time_zone=self.time_zone) def get_external_non_container_database(external_non_container_database_id: Optional[str] = None, opts:", "be a str\") pulumi.set(__self__, \"ncharacter_set\", ncharacter_set) if operations_insights_config and not", "@property @pulumi.getter(name=\"dbUniqueName\") def db_unique_name(self) -> str: \"\"\" The `DB_UNIQUE_NAME` of", "the time zone value was specified when the database was", "return pulumi.get(self, \"defined_tags\") @property @pulumi.getter(name=\"displayName\") def display_name(self) -> str: \"\"\"", "not isinstance(lifecycle_details, str): raise TypeError(\"Expected argument 'lifecycle_details' to be a", "pulumi.set(__self__, \"db_id\", db_id) if db_packs and not isinstance(db_packs, str): raise", "'external_non_container_database_id' to be a str\") pulumi.set(__self__, \"external_non_container_database_id\", external_non_container_database_id) if freeform_tags", "defined_tags) if display_name and not isinstance(display_name, str): raise TypeError(\"Expected argument", "a str\") pulumi.set(__self__, \"time_created\", time_created) if time_zone and not isinstance(time_zone,", "be unique. 
\"\"\" return pulumi.get(self, \"display_name\") @property @pulumi.getter(name=\"externalNonContainerDatabaseId\") def external_non_container_database_id(self)", "'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if id", "'character_set' to be a str\") pulumi.set(__self__, \"character_set\", character_set) if compartment_id", "isinstance(db_id, str): raise TypeError(\"Expected argument 'db_id' to be a str\")", "@property @pulumi.getter(name=\"timeZone\") def time_zone(self) -> str: \"\"\" The time zone", "str\") pulumi.set(__self__, \"display_name\", display_name) if external_non_container_database_id and not isinstance(external_non_container_database_id, str):", "import Any, Mapping, Optional, Sequence, Union, overload from .. import", "resource in Oracle Cloud Infrastructure Database service. Gets information about", "of the Database Management service. \"\"\" return pulumi.get(self, \"database_management_config\") @property", "[OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"databaseConfiguration\")", "The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure external database resource.", "Cloud. \"\"\" return pulumi.get(self, \"db_id\") @property @pulumi.getter(name=\"dbPacks\") def db_packs(self) ->", "ID, which identifies an Oracle Database located outside of Oracle", "\"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"databaseConfiguration\") def database_configuration(self) -> str:", "@pulumi.getter(name=\"databaseConfiguration\") def database_configuration(self) -> str: \"\"\" The Oracle Database configuration", "the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do", "is predefined and scoped to a namespace. 
For more information,", "\"\"\" This data source provides details about a specific External", "return pulumi.get(self, \"database_edition\") @property @pulumi.getter(name=\"databaseManagementConfig\") def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': \"\"\"", "compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to", "database resource. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"lifecycleDetails\") def lifecycle_details(self)", "id=self.id, lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config, state=self.state, time_created=self.time_created, time_zone=self.time_zone) def get_external_non_container_database(external_non_container_database_id: Optional[str]", "Infrastructure Database service. Gets information about a specific external non-container", "\"\"\" return pulumi.get(self, \"database_edition\") @property @pulumi.getter(name=\"databaseManagementConfig\") def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult':", "__args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value return AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set, compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config,", "are doing! *** import warnings import pulumi import pulumi.runtime from", "predefined name, type, or namespace. For more information, see [Resource", "and not isinstance(character_set, str): raise TypeError(\"Expected argument 'character_set' to be", "the Database Management service. 
\"\"\" return pulumi.get(self, \"database_management_config\") @property @pulumi.getter(name=\"databaseVersion\")", "pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ =", "database_edition=self.database_edition, database_management_config=self.database_management_config, database_version=self.database_version, db_id=self.db_id, db_packs=self.db_packs, db_unique_name=self.db_unique_name, defined_tags=self.defined_tags, display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags,", "pulumi.get(self, \"operations_insights_config\") @property @pulumi.getter def state(self) -> str: \"\"\" The", "The Oracle Database configuration \"\"\" return pulumi.get(self, \"database_configuration\") @property @pulumi.getter(name=\"databaseEdition\")", "-> str: \"\"\" Additional information about the current lifecycle state.", "= external_non_container_database_id if opts is None: opts = pulumi.InvokeOptions() if", "offset (a character type in the format '[+|-]TZH:TZM') or a", "pulumi.set(__self__, \"db_packs\", db_packs) if db_unique_name and not isinstance(db_unique_name, str): raise", "\"\"\" return pulumi.get(self, \"database_configuration\") @property @pulumi.getter(name=\"databaseEdition\") def database_edition(self) -> str:", "pulumi_oci as oci test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"]) ``` :param str external_non_container_database_id:", "return AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set, compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config, database_version=__ret__.database_version, 
db_id=__ret__.db_id, db_packs=__ret__.db_packs,", "as oci test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database[\"test_external_non_container_database\"][\"id\"]) ``` :param str external_non_container_database_id: The", "'ncharacter_set' to be a str\") pulumi.set(__self__, \"ncharacter_set\", ncharacter_set) if operations_insights_config", "argument 'operations_insights_config' to be a dict\") pulumi.set(__self__, \"operations_insights_config\", operations_insights_config) if", "edit by hand unless you're certain you know what you", "str\") pulumi.set(__self__, \"ncharacter_set\", ncharacter_set) if operations_insights_config and not isinstance(operations_insights_config, dict):", "values returned by getExternalNonContainerDatabase. \"\"\" def __init__(__self__, character_set=None, compartment_id=None, database_configuration=None,", "\"character_set\") @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm)", "raise TypeError(\"Expected argument 'database_configuration' to be a str\") pulumi.set(__self__, \"database_configuration\",", "-> str: \"\"\" The national character of the external database.", "-> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure", "TypeError(\"Expected argument 'db_unique_name' to be a str\") pulumi.set(__self__, \"db_unique_name\", db_unique_name)", "version. 
\"\"\" return pulumi.get(self, \"database_version\") @property @pulumi.getter(name=\"dbId\") def db_id(self) ->", "Each tag is a simple key-value pair with no predefined", "isinstance(ncharacter_set, str): raise TypeError(\"Expected argument 'ncharacter_set' to be a str\")", "GetExternalNonContainerDatabaseResult( character_set=self.character_set, compartment_id=self.compartment_id, database_configuration=self.database_configuration, database_edition=self.database_edition, database_management_config=self.database_management_config, database_version=self.database_version, db_id=self.db_id, db_packs=self.db_packs, db_unique_name=self.db_unique_name,", "-> AwaitableGetExternalNonContainerDatabaseResult: \"\"\" This data source provides details about a", "It is a time zone offset (a character type in", "Oracle Database edition. \"\"\" return pulumi.get(self, \"database_edition\") @property @pulumi.getter(name=\"databaseManagementConfig\") def", "return pulumi.get(self, \"db_packs\") @property @pulumi.getter(name=\"dbUniqueName\") def db_unique_name(self) -> str: \"\"\"", "to be a str\") pulumi.set(__self__, \"state\", state) if time_created and", "@pulumi.getter(name=\"lifecycleDetails\") def lifecycle_details(self) -> str: \"\"\" Additional information about the", "not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to be a", "Do not edit by hand unless you're certain you know", "`{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter def id(self)", "\"database_configuration\", database_configuration) if database_edition and not isinstance(database_edition, str): raise TypeError(\"Expected", "External Non Container Database resource in Oracle Cloud Infrastructure Database", "isinstance(operations_insights_config, dict): raise TypeError(\"Expected argument 'operations_insights_config' to be a dict\")", "The time zone of the external database. 
It is a", "@property @pulumi.getter(name=\"lifecycleDetails\") def lifecycle_details(self) -> str: \"\"\" Additional information about", "outputs __all__ = [ 'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database', ] @pulumi.output_type class", "self return GetExternalNonContainerDatabaseResult( character_set=self.character_set, compartment_id=self.compartment_id, database_configuration=self.database_configuration, database_edition=self.database_edition, database_management_config=self.database_management_config, database_version=self.database_version, db_id=self.db_id,", "\"external_non_container_database_id\", external_non_container_database_id) if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected", "dict() __args__['externalNonContainerDatabaseId'] = external_non_container_database_id if opts is None: opts =", "Oracle Cloud. \"\"\" return pulumi.get(self, \"db_id\") @property @pulumi.getter(name=\"dbPacks\") def db_packs(self)", "raise TypeError(\"Expected argument 'state' to be a str\") pulumi.set(__self__, \"state\",", "dict): raise TypeError(\"Expected argument 'defined_tags' to be a dict\") pulumi.set(__self__,", "-> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': \"\"\" The configuration of Operations Insights for the", "in the format '[+|-]TZH:TZM') or a time zone region name,", "defined_tags=None, display_name=None, external_non_container_database_id=None, freeform_tags=None, id=None, lifecycle_details=None, ncharacter_set=None, operations_insights_config=None, state=None, time_created=None,", "TypeError(\"Expected argument 'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id)", "be a str\") pulumi.set(__self__, \"external_non_container_database_id\", external_non_container_database_id) if freeform_tags and not", "from .. import _utilities from . 
import outputs __all__ =", "a str\") pulumi.set(__self__, \"display_name\", display_name) if external_non_container_database_id and not isinstance(external_non_container_database_id,", "if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags'", "pulumi.get(self, \"external_non_container_database_id\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\"", "# *** Do not edit by hand unless you're certain", "@pulumi.getter(name=\"dbUniqueName\") def db_unique_name(self) -> str: \"\"\" The `DB_UNIQUE_NAME` of the", "be a dict\") pulumi.set(__self__, \"operations_insights_config\", operations_insights_config) if state and not", "character set of the external database. \"\"\" return pulumi.get(self, \"character_set\")", "\"defined_tags\") @property @pulumi.getter(name=\"displayName\") def display_name(self) -> str: \"\"\" The user-friendly", "region name, depending on how the time zone value was", "return pulumi.get(self, \"display_name\") @property @pulumi.getter(name=\"externalNonContainerDatabaseId\") def external_non_container_database_id(self) -> str: return", "\"\"\" The Oracle Database edition. 
\"\"\" return pulumi.get(self, \"database_edition\") @property", "state=None, time_created=None, time_zone=None): if character_set and not isinstance(character_set, str): raise", "id(self) -> str: \"\"\" The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud", "\"operations_insights_config\") @property @pulumi.getter def state(self) -> str: \"\"\" The current", "argument 'time_zone' to be a str\") pulumi.set(__self__, \"time_zone\", time_zone) @property", "database_configuration=self.database_configuration, database_edition=self.database_edition, database_management_config=self.database_management_config, database_version=self.database_version, db_id=self.db_id, db_packs=self.db_packs, db_unique_name=self.db_unique_name, defined_tags=self.defined_tags, display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id,", "@pulumi.getter(name=\"dbPacks\") def db_packs(self) -> str: \"\"\" The database packs licensed", "@pulumi.getter(name=\"databaseManagementConfig\") def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': \"\"\" The configuration of the", "created / last altered. \"\"\" return pulumi.get(self, \"time_zone\") class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult):", "freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to", "to be a str\") pulumi.set(__self__, \"external_non_container_database_id\", external_non_container_database_id) if freeform_tags and", "For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
\"\"\" return pulumi.get(self, \"defined_tags\")", "and not isinstance(operations_insights_config, dict): raise TypeError(\"Expected argument 'operations_insights_config' to be", "@pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags for", "This data source provides details about a specific External Non", "namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). \"\"\" return pulumi.get(self,", "database. \"\"\" return pulumi.get(self, \"character_set\") @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) ->", "and not isinstance(lifecycle_details, str): raise TypeError(\"Expected argument 'lifecycle_details' to be", "-> str: return pulumi.get(self, \"external_non_container_database_id\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) ->", "was specified when the database was created / last altered.", "raise TypeError(\"Expected argument 'db_id' to be a str\") pulumi.set(__self__, \"db_id\",", "pulumi.set(__self__, \"database_management_config\", database_management_config) if database_version and not isinstance(database_version, str): raise", "if db_unique_name and not isinstance(db_unique_name, str): raise TypeError(\"Expected argument 'db_unique_name'", "class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): # pylint: disable=using-constant-test def __await__(self): if False: yield", "[Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
\"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter(name=\"displayName\") def display_name(self)", "(a character type in the format '[+|-]TZH:TZM') or a time", "is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version", "TypeError(\"Expected argument 'external_non_container_database_id' to be a str\") pulumi.set(__self__, \"external_non_container_database_id\", external_non_container_database_id)", "pulumi.get(self, \"db_id\") @property @pulumi.getter(name=\"dbPacks\") def db_packs(self) -> str: \"\"\" The", "Database Management service. \"\"\" return pulumi.get(self, \"database_management_config\") @property @pulumi.getter(name=\"databaseVersion\") def", "type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example:", "@property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags", ":param str external_non_container_database_id: The external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). \"\"\" __args__", "\"display_name\") @property @pulumi.getter(name=\"externalNonContainerDatabaseId\") def external_non_container_database_id(self) -> str: return pulumi.get(self, \"external_non_container_database_id\")", "not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to be a", "def get_external_non_container_database(external_non_container_database_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) ->", "Any]: \"\"\" Defined tags for this resource. Each key is", "display_name(self) -> str: \"\"\" The user-friendly name for the external", "'db_id' to be a str\") pulumi.set(__self__, \"db_id\", db_id) if db_packs", "provides details about a specific External Non Container Database resource", "-> str: \"\"\" The time zone of the external database.", "service. 
\"\"\" return pulumi.get(self, \"database_management_config\") @property @pulumi.getter(name=\"databaseVersion\") def database_version(self) ->", "Database. \"\"\" return pulumi.get(self, \"db_packs\") @property @pulumi.getter(name=\"dbUniqueName\") def db_unique_name(self) ->", "how the time zone value was specified when the database", "not isinstance(database_version, str): raise TypeError(\"Expected argument 'database_version' to be a", "isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to be a str\")", "a str\") pulumi.set(__self__, \"database_edition\", database_edition) if database_management_config and not isinstance(database_management_config,", "raise TypeError(\"Expected argument 'external_non_container_database_id' to be a str\") pulumi.set(__self__, \"external_non_container_database_id\",", "configuration of the Database Management service. \"\"\" return pulumi.get(self, \"database_management_config\")", "TypeError(\"Expected argument 'database_configuration' to be a str\") pulumi.set(__self__, \"database_configuration\", database_configuration)", "\"\"\" return pulumi.get(self, \"operations_insights_config\") @property @pulumi.getter def state(self) -> str:", "@pulumi.getter(name=\"displayName\") def display_name(self) -> str: \"\"\" The user-friendly name for", "this resource. 
Each tag is a simple key-value pair with", "to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if display_name and", "\"\"\" return pulumi.get(self, \"database_version\") @property @pulumi.getter(name=\"dbId\") def db_id(self) -> str:", "return pulumi.get(self, \"ncharacter_set\") @property @pulumi.getter(name=\"operationsInsightsConfig\") def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': \"\"\"", "raise TypeError(\"Expected argument 'id' to be a str\") pulumi.set(__self__, \"id\",", "return pulumi.get(self, \"character_set\") @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\"", "Optional, Sequence, Union, overload from .. import _utilities from .", "to be a str\") pulumi.set(__self__, \"display_name\", display_name) if external_non_container_database_id and", "for the external database \"\"\" return pulumi.get(self, \"operations_insights_config\") @property @pulumi.getter", "pulumi.get(self, \"database_management_config\") @property @pulumi.getter(name=\"databaseVersion\") def database_version(self) -> str: \"\"\" The", "def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': \"\"\" The configuration of the Database", "display_name and not isinstance(display_name, str): raise TypeError(\"Expected argument 'display_name' to", "file was generated by the Pulumi Terraform Bridge (tfgen) Tool.", "id and not isinstance(id, str): raise TypeError(\"Expected argument 'id' to", "the Oracle Cloud Infrastructure external database resource. 
\"\"\" return pulumi.get(self,", "name, depending on how the time zone value was specified", "if ncharacter_set and not isinstance(ncharacter_set, str): raise TypeError(\"Expected argument 'ncharacter_set'", "pulumi.get(self, \"display_name\") @property @pulumi.getter(name=\"externalNonContainerDatabaseId\") def external_non_container_database_id(self) -> str: return pulumi.get(self,", "return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter def id(self) -> str: \"\"\"", "] @pulumi.output_type class GetExternalNonContainerDatabaseResult: \"\"\" A collection of values returned", "time_zone and not isinstance(time_zone, str): raise TypeError(\"Expected argument 'time_zone' to", "namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}`", "\"\"\" The national character of the external database. \"\"\" return", "str\") pulumi.set(__self__, \"db_packs\", db_packs) if db_unique_name and not isinstance(db_unique_name, str):", "'[+|-]TZH:TZM') or a time zone region name, depending on how", "zone value was specified when the database was created /", "to be a str\") pulumi.set(__self__, \"ncharacter_set\", ncharacter_set) if operations_insights_config and", "\"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str:", "a simple key-value pair with no predefined name, type, or", "return pulumi.get(self, \"db_unique_name\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]:", "\"db_unique_name\", db_unique_name) if defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected", "str: \"\"\" The database packs licensed for the external Oracle", "TypeError(\"Expected argument 'time_created' to be a str\") pulumi.set(__self__, \"time_created\", time_created)", "pulumi.set(__self__, \"database_version\", database_version) if db_id and not 
isinstance(db_id, str): raise", "Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExternalNonContainerDatabaseResult: \"\"\" This data source provides", "not isinstance(db_id, str): raise TypeError(\"Expected argument 'db_id' to be a", "national character of the external database. \"\"\" return pulumi.get(self, \"ncharacter_set\")", "state(self) -> str: \"\"\" The current state of the Oracle", "database_configuration=None, database_edition=None, database_management_config=None, database_version=None, db_id=None, db_packs=None, db_unique_name=None, defined_tags=None, display_name=None, external_non_container_database_id=None,", "if display_name and not isinstance(display_name, str): raise TypeError(\"Expected argument 'display_name'", "@pulumi.getter(name=\"dbId\") def db_id(self) -> str: \"\"\" The Oracle Database ID,", "\"\"\" The Oracle Database version. \"\"\" return pulumi.get(self, \"database_version\") @property", "character of the external database. \"\"\" return pulumi.get(self, \"ncharacter_set\") @property", "\"\"\" return pulumi.get(self, \"db_packs\") @property @pulumi.getter(name=\"dbUniqueName\") def db_unique_name(self) -> str:", "display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id, freeform_tags=__ret__.freeform_tags, id=__ret__.id, lifecycle_details=__ret__.lifecycle_details, ncharacter_set=__ret__.ncharacter_set, operations_insights_config=__ret__.operations_insights_config, state=__ret__.state, time_created=__ret__.time_created, time_zone=__ret__.time_zone)", "db_packs) if db_unique_name and not isinstance(db_unique_name, str): raise TypeError(\"Expected argument", "Database edition. 
\"\"\" return pulumi.get(self, \"database_edition\") @property @pulumi.getter(name=\"databaseManagementConfig\") def database_management_config(self)", "a time zone offset (a character type in the format", "state) if time_created and not isinstance(time_created, str): raise TypeError(\"Expected argument", "external database. It is a time zone offset (a character", "if compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id'" ]
[ "license='MIT', classifiers=[ 'Programming Language :: Python :: 3.6', 'Environment ::", "install class PostInstallCommand(install): user_options = install.user_options + [ ('noservice', None,", "setup( name='xmediusmailrelayserver', version='1.0.0', description='The Python module to be used to", "setuptools.command.install import install class PostInstallCommand(install): user_options = install.user_options + [", "], cmdclass={ 'install': PostInstallCommand }, packages=['xmediusmailrelayserver'], package_data={'xmediusmailrelayserver': ['config.yml']}, install_requires=['pyyaml', 'aiosmtpd'],", "'Environment :: Win32 (MS Windows)', 'Operating System :: Microsoft ::", "console console.install_service(['--startup', 'auto', 'install']) setup( name='xmediusmailrelayserver', version='1.0.0', description='The Python module", "class PostInstallCommand(install): user_options = install.user_options + [ ('noservice', None, None),", "+ [ ('noservice', None, None), ] def initialize_options(self): install.initialize_options(self) self.noservice", "console.install_service(['--startup', 'auto', 'install']) setup( name='xmediusmailrelayserver', version='1.0.0', description='The Python module to", "mail to different servers depending on patterns', long_description='See https://github.com/xmedius/xmedius-mailrelayserver for", "def initialize_options(self): install.initialize_options(self) self.noservice = None def finalize_options(self): install.finalize_options(self) def", "('noservice', None, None), ] def initialize_options(self): install.initialize_options(self) self.noservice = None", ":: Win32 (MS Windows)', 'Operating System :: Microsoft :: Windows'", "relay mail to different servers depending on patterns', long_description='See https://github.com/xmedius/xmedius-mailrelayserver", "xmediusmailrelayserver import console console.install_service(['--startup', 'auto', 'install']) setup( name='xmediusmailrelayserver', version='1.0.0', description='The", "to be used to relay mail 
to different servers depending", "if not self.noservice: from xmediusmailrelayserver import console console.install_service(['--startup', 'auto', 'install'])", "= install.user_options + [ ('noservice', None, None), ] def initialize_options(self):", "setup from setuptools.command.install import install class PostInstallCommand(install): user_options = install.user_options", "to different servers depending on patterns', long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more", "depending on patterns', long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more information', url='https://github.com/xmedius/xmedius-mailrelayserver/', author='<NAME>',", "cmdclass={ 'install': PostInstallCommand }, packages=['xmediusmailrelayserver'], package_data={'xmediusmailrelayserver': ['config.yml']}, install_requires=['pyyaml', 'aiosmtpd'], dependency_links=[]", ":: 3.6', 'Environment :: Win32 (MS Windows)', 'Operating System ::", "'Operating System :: Microsoft :: Windows' ], cmdclass={ 'install': PostInstallCommand", "description='The Python module to be used to relay mail to", "import setup from setuptools.command.install import install class PostInstallCommand(install): user_options =", "install.run(self) if not self.noservice: from xmediusmailrelayserver import console console.install_service(['--startup', 'auto',", "Windows)', 'Operating System :: Microsoft :: Windows' ], cmdclass={ 'install':", "https://github.com/xmedius/xmedius-mailrelayserver for more information', url='https://github.com/xmedius/xmedius-mailrelayserver/', author='<NAME>', license='MIT', classifiers=[ 'Programming Language", "initialize_options(self): install.initialize_options(self) self.noservice = None def finalize_options(self): install.finalize_options(self) def run(self):", "install.finalize_options(self) def run(self): install.run(self) if not self.noservice: from xmediusmailrelayserver import", "[ ('noservice', None, None), ] def 
initialize_options(self): install.initialize_options(self) self.noservice =", "servers depending on patterns', long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more information', url='https://github.com/xmedius/xmedius-mailrelayserver/',", "import install class PostInstallCommand(install): user_options = install.user_options + [ ('noservice',", "Win32 (MS Windows)', 'Operating System :: Microsoft :: Windows' ],", "Language :: Python :: 3.6', 'Environment :: Win32 (MS Windows)',", "def finalize_options(self): install.finalize_options(self) def run(self): install.run(self) if not self.noservice: from", "None, None), ] def initialize_options(self): install.initialize_options(self) self.noservice = None def", "different servers depending on patterns', long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more information',", "def run(self): install.run(self) if not self.noservice: from xmediusmailrelayserver import console", "install.user_options + [ ('noservice', None, None), ] def initialize_options(self): install.initialize_options(self)", "= None def finalize_options(self): install.finalize_options(self) def run(self): install.run(self) if not", "to relay mail to different servers depending on patterns', long_description='See", "for more information', url='https://github.com/xmedius/xmedius-mailrelayserver/', author='<NAME>', license='MIT', classifiers=[ 'Programming Language ::", "Python module to be used to relay mail to different", "author='<NAME>', license='MIT', classifiers=[ 'Programming Language :: Python :: 3.6', 'Environment", "classifiers=[ 'Programming Language :: Python :: 3.6', 'Environment :: Win32", "finalize_options(self): install.finalize_options(self) def run(self): install.run(self) if not self.noservice: from xmediusmailrelayserver", "(MS Windows)', 'Operating System :: Microsoft :: Windows' ], cmdclass={", "long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more 
information', url='https://github.com/xmedius/xmedius-mailrelayserver/', author='<NAME>', license='MIT', classifiers=[ 'Programming", "self.noservice = None def finalize_options(self): install.finalize_options(self) def run(self): install.run(self) if", "from setuptools import setup from setuptools.command.install import install class PostInstallCommand(install):", "'install']) setup( name='xmediusmailrelayserver', version='1.0.0', description='The Python module to be used", "3.6', 'Environment :: Win32 (MS Windows)', 'Operating System :: Microsoft", "not self.noservice: from xmediusmailrelayserver import console console.install_service(['--startup', 'auto', 'install']) setup(", "'auto', 'install']) setup( name='xmediusmailrelayserver', version='1.0.0', description='The Python module to be", "self.noservice: from xmediusmailrelayserver import console console.install_service(['--startup', 'auto', 'install']) setup( name='xmediusmailrelayserver',", "import console console.install_service(['--startup', 'auto', 'install']) setup( name='xmediusmailrelayserver', version='1.0.0', description='The Python", "from xmediusmailrelayserver import console console.install_service(['--startup', 'auto', 'install']) setup( name='xmediusmailrelayserver', version='1.0.0',", "url='https://github.com/xmedius/xmedius-mailrelayserver/', author='<NAME>', license='MIT', classifiers=[ 'Programming Language :: Python :: 3.6',", "] def initialize_options(self): install.initialize_options(self) self.noservice = None def finalize_options(self): install.finalize_options(self)", "'install': PostInstallCommand }, packages=['xmediusmailrelayserver'], package_data={'xmediusmailrelayserver': ['config.yml']}, install_requires=['pyyaml', 'aiosmtpd'], dependency_links=[] )", "version='1.0.0', description='The Python module to be used to relay mail", "install.initialize_options(self) self.noservice = None def finalize_options(self): install.finalize_options(self) def run(self): install.run(self)", "used 
to relay mail to different servers depending on patterns',", "name='xmediusmailrelayserver', version='1.0.0', description='The Python module to be used to relay", "on patterns', long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more information', url='https://github.com/xmedius/xmedius-mailrelayserver/', author='<NAME>', license='MIT',", "PostInstallCommand(install): user_options = install.user_options + [ ('noservice', None, None), ]", "None def finalize_options(self): install.finalize_options(self) def run(self): install.run(self) if not self.noservice:", "'Programming Language :: Python :: 3.6', 'Environment :: Win32 (MS", "from setuptools.command.install import install class PostInstallCommand(install): user_options = install.user_options +", "more information', url='https://github.com/xmedius/xmedius-mailrelayserver/', author='<NAME>', license='MIT', classifiers=[ 'Programming Language :: Python", "Python :: 3.6', 'Environment :: Win32 (MS Windows)', 'Operating System", "be used to relay mail to different servers depending on", "run(self): install.run(self) if not self.noservice: from xmediusmailrelayserver import console console.install_service(['--startup',", "information', url='https://github.com/xmedius/xmedius-mailrelayserver/', author='<NAME>', license='MIT', classifiers=[ 'Programming Language :: Python ::", "None), ] def initialize_options(self): install.initialize_options(self) self.noservice = None def finalize_options(self):", "setuptools import setup from setuptools.command.install import install class PostInstallCommand(install): user_options", "module to be used to relay mail to different servers", ":: Microsoft :: Windows' ], cmdclass={ 'install': PostInstallCommand }, packages=['xmediusmailrelayserver'],", "System :: Microsoft :: Windows' ], cmdclass={ 'install': PostInstallCommand },", ":: Windows' ], cmdclass={ 'install': PostInstallCommand }, packages=['xmediusmailrelayserver'], 
package_data={'xmediusmailrelayserver': ['config.yml']},", "Windows' ], cmdclass={ 'install': PostInstallCommand }, packages=['xmediusmailrelayserver'], package_data={'xmediusmailrelayserver': ['config.yml']}, install_requires=['pyyaml',", "user_options = install.user_options + [ ('noservice', None, None), ] def", "patterns', long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more information', url='https://github.com/xmedius/xmedius-mailrelayserver/', author='<NAME>', license='MIT', classifiers=[", ":: Python :: 3.6', 'Environment :: Win32 (MS Windows)', 'Operating", "Microsoft :: Windows' ], cmdclass={ 'install': PostInstallCommand }, packages=['xmediusmailrelayserver'], package_data={'xmediusmailrelayserver':" ]
[ "def reorderList(self, head: ListNode) -> None: \"\"\" Do not return", "\"\"\" if not head: return self.pre = head self.flag =", "= 4.next == 5 ## 4.next = None ## 1.next.next", "x # self.next = None ## 整体上是交换,使用递归,先找到最后节点 ## 1 -》", "4.next == 5 ## 4.next = None ## 1.next.next ==", "= head self.flag = True def test(node): if not node.next:", "# 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。 # 示例 1: # 给定链表 1->2->3->4, 重新排列为 1->4->2->3.", "重新排列为 1->5->2->4->3. # Definition for singly-linked list. # class ListNode:", "test(node.next) if not self.flag: return if not self.pre.next: self.flag =", "test(node): if not node.next: # 如果 node.next 是 None,就不需要交换了 return", "= False return if self.pre == node: self.flag = False", "-》 5 ## | | ## temp = 1.next ==", "return anything, modify head in-place instead. \"\"\" if not head:", "= False return temp = self.pre.next self.pre.next = node.next self.pre.next.next", "# 给定链表 1->2->3->4->5, 重新排列为 1->5->2->4->3. # Definition for singly-linked list.", "== 5.next = 2 ## now = 2 ## last", "if not self.pre.next: self.flag = False return if self.pre ==", "return if self.pre == node: self.flag = False return temp", "# 将其重新排列后变为: L0→Ln→L1→Ln-1→L2→Ln-2→… # 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。 # 示例 1: # 给定链表", "# class ListNode: # def __init__(self, x): # self.val =", "return temp = self.pre.next self.pre.next = node.next self.pre.next.next = temp", "head: ListNode) -> None: \"\"\" Do not return anything, modify", "not node.next: # 如果 node.next 是 None,就不需要交换了 return test(node.next) if", "## now = 2 ## last = 3.next class Solution:", "head in-place instead. 
\"\"\" if not head: return self.pre =", "-》 3 -》 4 -》 5 ## | | ##", "not head: return self.pre = head self.flag = True def", "## | | ## temp = 1.next == 2 ##", "= True def test(node): if not node.next: # 如果 node.next", "self.pre.next.next = temp self.pre = temp node.next = None test(self.pre)", "self.next = None ## 整体上是交换,使用递归,先找到最后节点 ## 1 -》 2 -》", "## temp = 1.next == 2 ## 1.next = 4.next", "4.next = None ## 1.next.next == 5.next = 2 ##", "for singly-linked list. # class ListNode: # def __init__(self, x):", "-》 4 -》 5 ## | | ## temp =", "给定链表 1->2->3->4, 重新排列为 1->4->2->3. # 示例 2: # 给定链表 1->2->3->4->5,", "你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。 # 示例 1: # 给定链表 1->2->3->4, 重新排列为 1->4->2->3. #", "None: \"\"\" Do not return anything, modify head in-place instead.", "return test(node.next) if not self.flag: return if not self.pre.next: self.flag", "## last = 3.next class Solution: def reorderList(self, head: ListNode)", "self.val = x # self.next = None ## 整体上是交换,使用递归,先找到最后节点 ##", "3.next class Solution: def reorderList(self, head: ListNode) -> None: \"\"\"", "not return anything, modify head in-place instead. \"\"\" if not", "L0→Ln→L1→Ln-1→L2→Ln-2→… # 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。 # 示例 1: # 给定链表 1->2->3->4, 重新排列为", "# 给定链表 1->2->3->4, 重新排列为 1->4->2->3. # 示例 2: # 给定链表", "not self.pre.next: self.flag = False return if self.pre == node:", "list. # class ListNode: # def __init__(self, x): # self.val", "True def test(node): if not node.next: # 如果 node.next 是", "1->2->3->4, 重新排列为 1->4->2->3. # 示例 2: # 给定链表 1->2->3->4->5, 重新排列为", "self.pre.next = node.next self.pre.next.next = temp self.pre = temp node.next", "node: self.flag = False return temp = self.pre.next self.pre.next =", "示例 1: # 给定链表 1->2->3->4, 重新排列为 1->4->2->3. 
# 示例 2:", "__init__(self, x): # self.val = x # self.next = None", "not self.flag: return if not self.pre.next: self.flag = False return", "self.pre.next: self.flag = False return if self.pre == node: self.flag", "self.flag = False return temp = self.pre.next self.pre.next = node.next", "整体上是交换,使用递归,先找到最后节点 ## 1 -》 2 -》 3 -》 4 -》", "return self.pre = head self.flag = True def test(node): if", "给定链表 1->2->3->4->5, 重新排列为 1->5->2->4->3. # Definition for singly-linked list. #", "# def __init__(self, x): # self.val = x # self.next", "reorderList(self, head: ListNode) -> None: \"\"\" Do not return anything,", "L:L0→L1→…→Ln-1→Ln , # 将其重新排列后变为: L0→Ln→L1→Ln-1→L2→Ln-2→… # 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。 # 示例 1:", "= None ## 1.next.next == 5.next = 2 ## now", "if not self.flag: return if not self.pre.next: self.flag = False", "modify head in-place instead. \"\"\" if not head: return self.pre", "= None ## 整体上是交换,使用递归,先找到最后节点 ## 1 -》 2 -》 3", "head: return self.pre = head self.flag = True def test(node):", "= self.pre.next self.pre.next = node.next self.pre.next.next = temp self.pre =", "1.next.next == 5.next = 2 ## now = 2 ##", "= x # self.next = None ## 整体上是交换,使用递归,先找到最后节点 ## 1", "= node.next self.pre.next.next = temp self.pre = temp node.next =", "= None # Definition for singly-linked list. # class ListNode:", "self.pre.next self.pre.next = node.next self.pre.next.next = temp self.pre = temp", "# Definition for singly-linked list. # class ListNode: # def", "将其重新排列后变为: L0→Ln→L1→Ln-1→L2→Ln-2→… # 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。 # 示例 1: # 给定链表 1->2->3->4,", "False return temp = self.pre.next self.pre.next = node.next self.pre.next.next =", "2: # 给定链表 1->2->3->4->5, 重新排列为 1->5->2->4->3. # Definition for singly-linked", "in-place instead. 
\"\"\" if not head: return self.pre = head", "5 ## | | ## temp = 1.next == 2", "## 1 -》 2 -》 3 -》 4 -》 5", "x # self.next = None # Definition for singly-linked list.", "x): # self.val = x # self.next = None #", "## 1.next.next == 5.next = 2 ## now = 2", "= x # self.next = None # Definition for singly-linked", "1->4->2->3. # 示例 2: # 给定链表 1->2->3->4->5, 重新排列为 1->5->2->4->3. #", "-》 2 -》 3 -》 4 -》 5 ## |", "def test(node): if not node.next: # 如果 node.next 是 None,就不需要交换了", "node.next: # 如果 node.next 是 None,就不需要交换了 return test(node.next) if not", "if not node.next: # 如果 node.next 是 None,就不需要交换了 return test(node.next)", "2 ## last = 3.next class Solution: def reorderList(self, head:", "False return if self.pre == node: self.flag = False return", "5.next = 2 ## now = 2 ## last =", "是 None,就不需要交换了 return test(node.next) if not self.flag: return if not", "temp = self.pre.next self.pre.next = node.next self.pre.next.next = temp self.pre", "class ListNode: # def __init__(self, x): # self.val = x", "143. 重排链表 # 给定一个单链表 L:L0→L1→…→Ln-1→Ln , # 将其重新排列后变为: L0→Ln→L1→Ln-1→L2→Ln-2→… #", "# 示例 2: # 给定链表 1->2->3->4->5, 重新排列为 1->5->2->4->3. # Definition", "| ## temp = 1.next == 2 ## 1.next =", "示例 2: # 给定链表 1->2->3->4->5, 重新排列为 1->5->2->4->3. # Definition for", "重新排列为 1->4->2->3. # 示例 2: # 给定链表 1->2->3->4->5, 重新排列为 1->5->2->4->3.", "2 ## 1.next = 4.next == 5 ## 4.next =", "## 整体上是交换,使用递归,先找到最后节点 ## 1 -》 2 -》 3 -》 4", "## 1.next = 4.next == 5 ## 4.next = None", "2 -》 3 -》 4 -》 5 ## | |", "== 5 ## 4.next = None ## 1.next.next == 5.next", "self.pre == node: self.flag = False return temp = self.pre.next", "Solution: def reorderList(self, head: ListNode) -> None: \"\"\" Do not", "# self.val = x # self.next = None # Definition", "Definition for singly-linked list. # class ListNode: # def __init__(self,", "x): # self.val = x # self.next = None ##", "None # Definition for singly-linked list. # class ListNode: #", "anything, modify head in-place instead. 
\"\"\" if not head: return", "-> None: \"\"\" Do not return anything, modify head in-place", "# 示例 1: # 给定链表 1->2->3->4, 重新排列为 1->4->2->3. # 示例", "给定一个单链表 L:L0→L1→…→Ln-1→Ln , # 将其重新排列后变为: L0→Ln→L1→Ln-1→L2→Ln-2→… # 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。 # 示例", "2 ## now = 2 ## last = 3.next class", "head self.flag = True def test(node): if not node.next: #", "ListNode: # def __init__(self, x): # self.val = x #", "= 2 ## now = 2 ## last = 3.next", "self.pre = head self.flag = True def test(node): if not", "None,就不需要交换了 return test(node.next) if not self.flag: return if not self.pre.next:", "Do not return anything, modify head in-place instead. \"\"\" if", "== 2 ## 1.next = 4.next == 5 ## 4.next", "5 ## 4.next = None ## 1.next.next == 5.next =", "None ## 1.next.next == 5.next = 2 ## now =", "node.next self.pre.next.next = temp self.pre = temp node.next = None", "= 3.next class Solution: def reorderList(self, head: ListNode) -> None:", "| | ## temp = 1.next == 2 ## 1.next", "\"\"\" Do not return anything, modify head in-place instead. \"\"\"", "如果 node.next 是 None,就不需要交换了 return test(node.next) if not self.flag: return", "# 给定一个单链表 L:L0→L1→…→Ln-1→Ln , # 将其重新排列后变为: L0→Ln→L1→Ln-1→L2→Ln-2→… # 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。 #", "def __init__(self, x): # self.val = x # self.next =", "return if not self.pre.next: self.flag = False return if self.pre", "1.next = 4.next == 5 ## 4.next = None ##", "# self.val = x # self.next = None ## 整体上是交换,使用递归,先找到最后节点", "singly-linked list. # class ListNode: # def __init__(self, x): #", "1->5->2->4->3. # Definition for singly-linked list. # class ListNode: #", "# self.next = None ## 整体上是交换,使用递归,先找到最后节点 ## 1 -》 2", "## 4.next = None ## 1.next.next == 5.next = 2", "instead. \"\"\" if not head: return self.pre = head self.flag", "重排链表 # 给定一个单链表 L:L0→L1→…→Ln-1→Ln , # 将其重新排列后变为: L0→Ln→L1→Ln-1→L2→Ln-2→… # 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。", "if not head: return self.pre = head self.flag = True", "1->2->3->4->5, 重新排列为 1->5->2->4->3. 
# Definition for singly-linked list. # class", ", # 将其重新排列后变为: L0→Ln→L1→Ln-1→L2→Ln-2→… # 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。 # 示例 1: #", "None ## 整体上是交换,使用递归,先找到最后节点 ## 1 -》 2 -》 3 -》", "ListNode) -> None: \"\"\" Do not return anything, modify head", "now = 2 ## last = 3.next class Solution: def", "self.val = x # self.next = None # Definition for", "1: # 给定链表 1->2->3->4, 重新排列为 1->4->2->3. # 示例 2: #", "4 -》 5 ## | | ## temp = 1.next", "if self.pre == node: self.flag = False return temp =", "# 如果 node.next 是 None,就不需要交换了 return test(node.next) if not self.flag:", "self.flag = True def test(node): if not node.next: # 如果", "1.next == 2 ## 1.next = 4.next == 5 ##", "# 143. 重排链表 # 给定一个单链表 L:L0→L1→…→Ln-1→Ln , # 将其重新排列后变为: L0→Ln→L1→Ln-1→L2→Ln-2→…", "self.flag: return if not self.pre.next: self.flag = False return if", "== node: self.flag = False return temp = self.pre.next self.pre.next", "last = 3.next class Solution: def reorderList(self, head: ListNode) ->", "class Solution: def reorderList(self, head: ListNode) -> None: \"\"\" Do", "self.next = None # Definition for singly-linked list. # class", "# self.next = None # Definition for singly-linked list. #", "node.next 是 None,就不需要交换了 return test(node.next) if not self.flag: return if", "self.flag = False return if self.pre == node: self.flag =", "temp = 1.next == 2 ## 1.next = 4.next ==", "= 1.next == 2 ## 1.next = 4.next == 5", "3 -》 4 -》 5 ## | | ## temp", "1 -》 2 -》 3 -》 4 -》 5 ##", "= 2 ## last = 3.next class Solution: def reorderList(self," ]
[ "self._tag_type), \"arg must be \" + self._tag_type.__name__ self._values.remove(x) @staticmethod def", "#!/usr/bin/env python from CraftProtocol.NBT.NBTBase import NBTBase from CraftProtocol.NBT.NBTProvider import NBTProvider", "return self._values.__len__() def append(self, x): assert isinstance(x, self._tag_type), \"arg must", "\" + self._tag_type.__name__ self._values.remove(x) @staticmethod def write(stream, tag): StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID)", "must be \" + self._tag_type.__name__ self._values.__setitem__(i, o) def __delitem__(self, i):", "self._tag_type def __getitem__(self, i): return self._values.__getitem__(i) def __setitem__(self, i, o):", "\"arg must be \" + self._tag_type.__name__ self._values.append(x) def remove(self, x):", "be \" + self._tag_type.__name__ self._values.remove(x) @staticmethod def write(stream, tag): StreamIO.write_ubyte(stream,", "is None: values = [] self._tag_type = tag_type self._values =", "\"value must be \" + self._tag_type.__name__ self._values.__setitem__(i, o) def __delitem__(self,", "0x09 def __init__(self, tag_type, values=None): NBTBase.__init__(self) if values is None:", "= [] len = StreamIO.read_int(stream) for i in xrange(len): values.append(tag_type.read(stream))", "+ self._tag_type.__name__ self._values.__setitem__(i, o) def __delitem__(self, i): self._values.__delitem__(i) def __iter__(self):", "return self._values.__iter__() def __contains__(self, o): return self._values.__contains__(o) def __len__(self): return", "__len__(self): return self._values.__len__() def append(self, x): assert isinstance(x, self._tag_type), \"arg", "\"arg must be \" + self._tag_type.__name__ self._values.remove(x) @staticmethod def write(stream,", "self._tag_type.__name__ self._values.__setitem__(i, o) def __delitem__(self, i): self._values.__delitem__(i) def __iter__(self): return", "i): self._values.__delitem__(i) def __iter__(self): return self._values.__iter__() def __contains__(self, o): return", "def 
remove(self, x): assert isinstance(x, self._tag_type), \"arg must be \"", "list(values) def get(self): return self._values def get_tag_type(self): return self._tag_type def", "assert isinstance(o, self._tag_type), \"value must be \" + self._tag_type.__name__ self._values.__setitem__(i,", "python from CraftProtocol.NBT.NBTBase import NBTBase from CraftProtocol.NBT.NBTProvider import NBTProvider from", "get_tag_type(self): return self._tag_type def __getitem__(self, i): return self._values.__getitem__(i) def __setitem__(self,", "for i in tag: tag.get_tag_type().write(stream, i) @staticmethod def read(stream): tag_type_id", "self._values.append(x) def remove(self, x): assert isinstance(x, self._tag_type), \"arg must be", "be \" + self._tag_type.__name__ self._values.append(x) def remove(self, x): assert isinstance(x,", "NBTTagList(NBTBase): TYPE_ID = 0x09 def __init__(self, tag_type, values=None): NBTBase.__init__(self) if", "self._values.__len__() def append(self, x): assert isinstance(x, self._tag_type), \"arg must be", "def __setitem__(self, i, o): assert isinstance(o, self._tag_type), \"value must be", "from CraftProtocol.StreamIO import StreamIO class NBTTagList(NBTBase): TYPE_ID = 0x09 def", "assert isinstance(x, self._tag_type), \"arg must be \" + self._tag_type.__name__ self._values.append(x)", "values=None): NBTBase.__init__(self) if values is None: values = [] self._tag_type", "NBTBase.__init__(self) if values is None: values = [] self._tag_type =", "__getitem__(self, i): return self._values.__getitem__(i) def __setitem__(self, i, o): assert isinstance(o,", "def __init__(self, tag_type, values=None): NBTBase.__init__(self) if values is None: values", "def __getitem__(self, i): return self._values.__getitem__(i) def __setitem__(self, i, o): assert", "None: values = [] self._tag_type = tag_type self._values = list(values)", "tag): StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID) StreamIO.write_int(stream, len(tag)) for i in tag: 
tag.get_tag_type().write(stream,", "import StreamIO class NBTTagList(NBTBase): TYPE_ID = 0x09 def __init__(self, tag_type,", "isinstance(o, self._tag_type), \"value must be \" + self._tag_type.__name__ self._values.__setitem__(i, o)", "= StreamIO.read_int(stream) for i in xrange(len): values.append(tag_type.read(stream)) return NBTTagList(tag_type, values)", "__iter__(self): return self._values.__iter__() def __contains__(self, o): return self._values.__contains__(o) def __len__(self):", "return self._values def get_tag_type(self): return self._tag_type def __getitem__(self, i): return", "def get(self): return self._values def get_tag_type(self): return self._tag_type def __getitem__(self,", "i): return self._values.__getitem__(i) def __setitem__(self, i, o): assert isinstance(o, self._tag_type),", "return self._values.__contains__(o) def __len__(self): return self._values.__len__() def append(self, x): assert", "StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID) StreamIO.write_int(stream, len(tag)) for i in tag: tag.get_tag_type().write(stream, i)", "self._values.__delitem__(i) def __iter__(self): return self._values.__iter__() def __contains__(self, o): return self._values.__contains__(o)", "append(self, x): assert isinstance(x, self._tag_type), \"arg must be \" +", "NBTProvider from CraftProtocol.StreamIO import StreamIO class NBTTagList(NBTBase): TYPE_ID = 0x09", "= StreamIO.read_ubyte(stream) tag_type = NBTProvider.get_tag_class(tag_type_id) values = [] len =", "def get_tag_type(self): return self._tag_type def __getitem__(self, i): return self._values.__getitem__(i) def", "self._values def get_tag_type(self): return self._tag_type def __getitem__(self, i): return self._values.__getitem__(i)", "= 0x09 def __init__(self, tag_type, values=None): NBTBase.__init__(self) if values is", "StreamIO.write_int(stream, len(tag)) for i in tag: tag.get_tag_type().write(stream, i) @staticmethod def", "i) @staticmethod def read(stream): tag_type_id = 
StreamIO.read_ubyte(stream) tag_type = NBTProvider.get_tag_class(tag_type_id)", "self._tag_type.__name__ self._values.remove(x) @staticmethod def write(stream, tag): StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID) StreamIO.write_int(stream, len(tag))", "o) def __delitem__(self, i): self._values.__delitem__(i) def __iter__(self): return self._values.__iter__() def", "@staticmethod def write(stream, tag): StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID) StreamIO.write_int(stream, len(tag)) for i", "values is None: values = [] self._tag_type = tag_type self._values", "write(stream, tag): StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID) StreamIO.write_int(stream, len(tag)) for i in tag:", "TYPE_ID = 0x09 def __init__(self, tag_type, values=None): NBTBase.__init__(self) if values", "assert isinstance(x, self._tag_type), \"arg must be \" + self._tag_type.__name__ self._values.remove(x)", "= tag_type self._values = list(values) def get(self): return self._values def", "in tag: tag.get_tag_type().write(stream, i) @staticmethod def read(stream): tag_type_id = StreamIO.read_ubyte(stream)", "import NBTBase from CraftProtocol.NBT.NBTProvider import NBTProvider from CraftProtocol.StreamIO import StreamIO", "def read(stream): tag_type_id = StreamIO.read_ubyte(stream) tag_type = NBTProvider.get_tag_class(tag_type_id) values =", "x): assert isinstance(x, self._tag_type), \"arg must be \" + self._tag_type.__name__", "[] self._tag_type = tag_type self._values = list(values) def get(self): return", "+ self._tag_type.__name__ self._values.remove(x) @staticmethod def write(stream, tag): StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID) StreamIO.write_int(stream,", "StreamIO.read_ubyte(stream) tag_type = NBTProvider.get_tag_class(tag_type_id) values = [] len = StreamIO.read_int(stream)", "tag_type, values=None): NBTBase.__init__(self) if values is None: values = []", "class NBTTagList(NBTBase): TYPE_ID = 0x09 def __init__(self, tag_type, values=None): 
NBTBase.__init__(self)", "def __delitem__(self, i): self._values.__delitem__(i) def __iter__(self): return self._values.__iter__() def __contains__(self,", "i in tag: tag.get_tag_type().write(stream, i) @staticmethod def read(stream): tag_type_id =", "tag: tag.get_tag_type().write(stream, i) @staticmethod def read(stream): tag_type_id = StreamIO.read_ubyte(stream) tag_type", "= list(values) def get(self): return self._values def get_tag_type(self): return self._tag_type", "if values is None: values = [] self._tag_type = tag_type", "+ self._tag_type.__name__ self._values.append(x) def remove(self, x): assert isinstance(x, self._tag_type), \"arg", "return self._tag_type def __getitem__(self, i): return self._values.__getitem__(i) def __setitem__(self, i,", "tag_type self._values = list(values) def get(self): return self._values def get_tag_type(self):", "self._tag_type), \"value must be \" + self._tag_type.__name__ self._values.__setitem__(i, o) def", "return self._values.__getitem__(i) def __setitem__(self, i, o): assert isinstance(o, self._tag_type), \"value", "self._tag_type.__name__ self._values.append(x) def remove(self, x): assert isinstance(x, self._tag_type), \"arg must", "must be \" + self._tag_type.__name__ self._values.remove(x) @staticmethod def write(stream, tag):", "i, o): assert isinstance(o, self._tag_type), \"value must be \" +", "tag.get_tag_type().write(stream, i) @staticmethod def read(stream): tag_type_id = StreamIO.read_ubyte(stream) tag_type =", "[] len = StreamIO.read_int(stream) for i in xrange(len): values.append(tag_type.read(stream)) return", "import NBTProvider from CraftProtocol.StreamIO import StreamIO class NBTTagList(NBTBase): TYPE_ID =", "CraftProtocol.NBT.NBTBase import NBTBase from CraftProtocol.NBT.NBTProvider import NBTProvider from CraftProtocol.StreamIO import", "be \" + self._tag_type.__name__ self._values.__setitem__(i, o) def __delitem__(self, i): self._values.__delitem__(i)", "__contains__(self, o): return 
self._values.__contains__(o) def __len__(self): return self._values.__len__() def append(self,", "self._values.__getitem__(i) def __setitem__(self, i, o): assert isinstance(o, self._tag_type), \"value must", "NBTProvider.get_tag_class(tag_type_id) values = [] len = StreamIO.read_int(stream) for i in", "get(self): return self._values def get_tag_type(self): return self._tag_type def __getitem__(self, i):", "tag_type = NBTProvider.get_tag_class(tag_type_id) values = [] len = StreamIO.read_int(stream) for", "= [] self._tag_type = tag_type self._values = list(values) def get(self):", "__delitem__(self, i): self._values.__delitem__(i) def __iter__(self): return self._values.__iter__() def __contains__(self, o):", "self._tag_type), \"arg must be \" + self._tag_type.__name__ self._values.append(x) def remove(self,", "read(stream): tag_type_id = StreamIO.read_ubyte(stream) tag_type = NBTProvider.get_tag_class(tag_type_id) values = []", "self._values.remove(x) @staticmethod def write(stream, tag): StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID) StreamIO.write_int(stream, len(tag)) for", "__setitem__(self, i, o): assert isinstance(o, self._tag_type), \"value must be \"", "def __contains__(self, o): return self._values.__contains__(o) def __len__(self): return self._values.__len__() def", "self._tag_type = tag_type self._values = list(values) def get(self): return self._values", "self._values.__iter__() def __contains__(self, o): return self._values.__contains__(o) def __len__(self): return self._values.__len__()", "o): return self._values.__contains__(o) def __len__(self): return self._values.__len__() def append(self, x):", "from CraftProtocol.NBT.NBTProvider import NBTProvider from CraftProtocol.StreamIO import StreamIO class NBTTagList(NBTBase):", "def __iter__(self): return self._values.__iter__() def __contains__(self, o): return self._values.__contains__(o) def", "= NBTProvider.get_tag_class(tag_type_id) values = [] len = StreamIO.read_int(stream) for i", "o): 
assert isinstance(o, self._tag_type), \"value must be \" + self._tag_type.__name__", "def write(stream, tag): StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID) StreamIO.write_int(stream, len(tag)) for i in", "must be \" + self._tag_type.__name__ self._values.append(x) def remove(self, x): assert", "isinstance(x, self._tag_type), \"arg must be \" + self._tag_type.__name__ self._values.append(x) def", "values = [] len = StreamIO.read_int(stream) for i in xrange(len):", "@staticmethod def read(stream): tag_type_id = StreamIO.read_ubyte(stream) tag_type = NBTProvider.get_tag_class(tag_type_id) values", "StreamIO class NBTTagList(NBTBase): TYPE_ID = 0x09 def __init__(self, tag_type, values=None):", "len(tag)) for i in tag: tag.get_tag_type().write(stream, i) @staticmethod def read(stream):", "from CraftProtocol.NBT.NBTBase import NBTBase from CraftProtocol.NBT.NBTProvider import NBTProvider from CraftProtocol.StreamIO", "tag_type_id = StreamIO.read_ubyte(stream) tag_type = NBTProvider.get_tag_class(tag_type_id) values = [] len", "values = [] self._tag_type = tag_type self._values = list(values) def", "len = StreamIO.read_int(stream) for i in xrange(len): values.append(tag_type.read(stream)) return NBTTagList(tag_type,", "isinstance(x, self._tag_type), \"arg must be \" + self._tag_type.__name__ self._values.remove(x) @staticmethod", "def __len__(self): return self._values.__len__() def append(self, x): assert isinstance(x, self._tag_type),", "\" + self._tag_type.__name__ self._values.append(x) def remove(self, x): assert isinstance(x, self._tag_type),", "CraftProtocol.StreamIO import StreamIO class NBTTagList(NBTBase): TYPE_ID = 0x09 def __init__(self,", "tag.get_tag_type().TYPE_ID) StreamIO.write_int(stream, len(tag)) for i in tag: tag.get_tag_type().write(stream, i) @staticmethod", "self._values.__setitem__(i, o) def __delitem__(self, i): self._values.__delitem__(i) def __iter__(self): return self._values.__iter__()", "__init__(self, tag_type, values=None): 
NBTBase.__init__(self) if values is None: values =", "CraftProtocol.NBT.NBTProvider import NBTProvider from CraftProtocol.StreamIO import StreamIO class NBTTagList(NBTBase): TYPE_ID", "self._values = list(values) def get(self): return self._values def get_tag_type(self): return", "self._values.__contains__(o) def __len__(self): return self._values.__len__() def append(self, x): assert isinstance(x,", "\" + self._tag_type.__name__ self._values.__setitem__(i, o) def __delitem__(self, i): self._values.__delitem__(i) def", "NBTBase from CraftProtocol.NBT.NBTProvider import NBTProvider from CraftProtocol.StreamIO import StreamIO class", "def append(self, x): assert isinstance(x, self._tag_type), \"arg must be \"", "remove(self, x): assert isinstance(x, self._tag_type), \"arg must be \" +" ]
[ "1.07 return test_values(res) def prod(x, t_indx, s_indx): res = x.copy()", "-5.225820110717917, 57.71107021356826) grid = transit(grid, 2, [12], [1.0]) grid =", "10) grid = shift(grid, 8, 1, -0.2952350240798842) grid = sin(grid,", "res = x.copy() res[:,:,t_indx] = np.prod(x[:,:,s_indx], -1) return test_values(res) def", "3, 11, -6.496603906160505, -73.75617586359363) grid = transit(grid, 6, [6, 14],", "0.31428322385174284]) grid = shift(grid, 6, 15, 4.115946851379848) grid = transit(grid,", "10, 9, 6.219381309190064, -71.03631884776823) grid = sin(grid, 9, 6, 1.6821417847846682,", "0, 6], [0.24973877983541862, 0.3378766591098989, 0.15974656746239488, 0.027776085211312595, 0.02330072841260748, 0.20156117996836745]) grid =", "[0.10267794314653868, 0.019022820046952493, 0.061606568183823145, 0.4832751235896067, 0.33341754503307897]) grid = transit(grid, 13, [10,", "= transit(grid, 1, [4, 14, 0, 13], [0.2785496566747933, 0.004915230889640017, 0.30146401859790545,", "[1.0]) grid = prod(grid, 14, [11, 10]) grid = transit(grid,", "(1 + shift[i])) + 1) * 2 - 1 if", "6, 14, 0, 3, 11], [0.13835365002720226, 0.008781149737259792, 0.24627334258742545, 0.04870190081124998, 0.049950480577274,", "y * -0.5155435342135386) / 2 grid[:,:,4] = (x * -0.6644350461377522", "test_values(res) def smooth_max(x, t_indx, s1_indx, s2_indx, p = 10): res", "8, [9, 3], [0.30088974760959275, 0.6991102523904072]) grid = transit(grid, 8, [2,", "0.5257364876661353]) grid = inverse(grid, 1, 0) grid = smooth_max(grid, 1,", "6, 14, -1.927951619591129, -65.3028706482776) grid = prod(grid, 14, [13]) grid", "1)*SIZE, y*SIZE:(y+1)*SIZE] = grid[:,:,j] img = (img + 1) *", "8, [6, 2], [0.6857167761482571, 0.31428322385174284]) grid = shift(grid, 6, 15,", "= sin(grid, 3, 12, -4.078686662791614, 24.459526349523884) grid = inverse(grid, 15,", "0, [14, 3, 11, 10, 7], [0.5203714128788618, 0.068511863728177, 0.10141059844877331, 0.2728285912351676,", "> 0: res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx] + 1) / 2) **", 
"9, 12, 6, 11, 14], [0.10006330804326793, 0.03891760159161208, 0.005474465860804227, 0.12962618248625338, 0.03090992138168193,", "2) grid = smooth_min(grid, 7, 4, 13) grid = magnitude(grid,", "= prod(grid, 1, [12, 13]) grid = sin(grid, 6, 14,", "7, [11, 9, 2], [0.5001532946669459, 0.42070604285213226, 0.07914066248092186]) grid = inverse(grid,", "[13], [1.0]) grid = sin(grid, 3, 12, 6.470760426148978, -53.62090724330151) grid", "2, 8, 14], [0.3705316303566195, 0.1755951969700656, 0.043989590834687294, 0.22866693087969006, 0.1812166509589377]) grid =", "* 4)) for j in range(GRID_CHANNELS): x = j %", "[0.1290607634325389, 0.8709392365674611]) grid = transit(grid, 14, [14, 13, 15], [0.530662002197574,", "1, 11, 0, 15], [0.036901331671075975, 0.5054281720479712, 0.13288430351514774, 0.10820806749406277, 0.21657812527174225]) grid", "[4, 13], 2) grid = transit(grid, 8, [5, 4, 15,", "2, 0.9155140652310594, -34.1653400637653) grid = transit(grid, 8, [14], [1.0]) grid", "8, [9, 10, 2, 15, 13], [0.3265190472987195, 0.21568397721657098, 0.06226802479442838, 0.0028158122366541832,", "7, [15, 6, 2, 7], [0.45073658968521574, 0.16060948991238613, 0.12949271785123345, 0.2591612025511646]) grid", "transit(grid, 10, [14], [1.0]) grid = transit(grid, 15, [11, 4,", "[7], [1.0]) grid = sin(grid, 10, 9, 6.219381309190064, -71.03631884776823) grid", "0], [0.2070905138265326, 0.06562120796792839, 0.17355051228662716, 0.05514926535269553, 0.0829726599151083, 0.41561584065110807]) grid = transit(grid,", "sin(grid, 10, 4, 1.2844464834351186, -45.836492724169695) grid = sin(grid, 1, 2,", "= transit(grid, 11, [1, 15, 5, 0, 6, 12, 2,", "grid = smooth_max(grid, 8, 11, 15) grid = sin(grid, 12,", "s_indx, shift): res = x.copy() if shift > 0: res[:,:,t_indx]", "0], 2) grid = transit(grid, 6, [15, 8], [0.5303803951305812, 0.4696196048694189])", "params SIZE = 768 GRID_CHANNELS = 16 def test_values(arr): if", "grid = magnitude(grid, 9, [12, 14, 4], 2) grid =", "transit(grid, 11, [2], [1.0]) #create 
color space def shift_colors(x, shift):", "[0.010597803396528332, 0.7371576932264431, 0.25224450337702853]) grid = sin(grid, 11, 8, 4.303514875116891, -67.11152580467314)", "[0.23221079251346607, 0.3307147367708056, 0.26199556841553734, 0.018127231672754242, 0.13788777275073352, 0.01906389787670339]) grid = sin(grid, 4,", "[5, 15, 10], [0.13237609957996088, 0.22944646977966682, 0.6381774306403722]) grid = transit(grid, 6,", "0, [4, 3, 8], [0.23275058190778222, 0.49901982570530873, 0.2682295923869092]) grid = magnitude(grid,", "res = x.copy() res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx] * p) + np.exp(x[:,:,s2_indx]", "2) grid = transit(grid, 13, [11, 0], [0.6569516962992897, 0.3430483037007103]) grid", "grid = transit(grid, 2, [12], [1.0]) grid = prod(grid, 14,", "0.12939497982917472, 0.08164480089591167, 0.24583958083442445, 0.2244518823086713, 0.13799633398542827]) grid = transit(grid, 11, [0],", "= smooth_min(grid, 9, 9, 10) grid = shift(grid, 8, 1,", "= sin(grid, 4, 8, 4.28026157040775, -75.14180284322572) grid = prod(grid, 3,", "2020, 18:58:18) [MSC v.1900 64 bit (AMD64)] # For more", "2) grid = prod(grid, 2, [3, 11, 1]) grid =", "grid = sin(grid, 5, 14, -1.45141083652418, -99.85812912291547) grid = transit(grid,", "[13]) grid = sin(grid, 1, 12, -0.5111321725063378, 18.261359970959475) grid =", "transit(grid, 0, [7, 11, 15, 8, 12, 0, 4, 14,", "if np.amin(arr) < -1 or np.amax(arr) > 1: raise Exception('Values", "2) grid = transit(grid, 8, [3, 15, 9, 6, 11],", "** (1/p)) / 1.07 return test_values(res) def prod(x, t_indx, s_indx):", "[5, 0], 2) grid = transit(grid, 6, [15, 8], [0.5303803951305812,", "0.02665655056773558, 0.17667886361751853, 0.15211061797378253, 0.016462544099609754, 0.0072484377164178625, 0.4477791048998878, 0.11849249751317383]) grid = transit(grid,", "9, 12, 2], [0.031587088727564654, 0.024264739611302585, 0.0306940545567164, 0.19611241111174804, 0.7173417059926683]) grid =", "14, 15, 7, 1], [0.20378471182464508, 0.038241020379710625, 
0.16903312106740406, 0.3387613981701764, 0.11303295854369695, 0.13714679001436697])", "7, 15, -4.9164570678736865, 86.15931416043557) grid = sin(grid, 1, 7, 1.6265187305620117,", "1, 11, 0.5071121900678415, 10.950101187785563) grid = shift(grid, 13, 3, 5.677279514103952)", "ord = ord) / np.sqrt(len(s_indx)) return test_values(res) def shift(x, t_indx,", "2, [1, 2], [0.9078557995211777, 0.09214420047882232]) grid = smooth_max(grid, 1, 0,", "= transit(grid, 1, [3], [1.0]) grid = magnitude(grid, 14, [4],", "0, 11, 7, 3, 8], [0.03500911832175082, 0.03265868671024263, 0.3248025339288217, 0.4234363710484886, 0.13338109758306646,", "+ y * 0.7746597063144072) / 2 grid[:,:,12] = (x *", "= smooth_min(grid, 4, 1, 8) grid = sin(grid, 4, 4,", "17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)] # For", "[0, 4, 2], [0.010597803396528332, 0.7371576932264431, 0.25224450337702853]) grid = sin(grid, 11,", "1, [12, 8, 10, 4, 2], [0.43102537693091664, 0.25433300797798253, 0.21618454566402304, 0.046743011673522995,", "1) ** (1 / (1 - shift[i])) * 2 -", "2 grid[:,:,12] = (x * -0.5303146721156469 + y * -0.41048419195488317)", "0.2798246614241187]) grid = prod(grid, 4, [10, 0, 2, 4, 8,", "grid = sin(grid, 3, 12, 6.470760426148978, -53.62090724330151) grid = sin(grid,", "[0.085742434722219, 0.4119764535375412, 0.08377067725345017, 0.13045782410775286, 0.02917564277599849, 0.12489006625007311, 0.13398690135296518]) grid = transit(grid,", "[0.11084510086381213, 0.003439701966452383, 0.10819642722960272, 0.15371289739415475, 0.25812192912399506, 0.005727171643985687, 0.14633649245899077, 0.033890406689391105, 0.05550396325806974, 0.1242259093715456])", "/ 2) ** (1 - shift) - 1) ** (1", "> 1: raise Exception('Values went to far! 
[ %.2f :", "13, 4, 7], [0.23221079251346607, 0.3307147367708056, 0.26199556841553734, 0.018127231672754242, 0.13788777275073352, 0.01906389787670339]) grid", "9, [12, 14, 4], 2) grid = shift(grid, 3, 9,", "np.linalg.norm(x[:,:,s_indx], axis = -1, ord = ord) / np.sqrt(len(s_indx)) return", "-89.43842740853354) grid = transit(grid, 0, [12, 6, 4, 9, 1,", "0.08637063851194285, 0.06076815802338077, 0.022574848472165728]) grid = transit(grid, 4, [11, 4, 15,", "= transit(grid, 4, [11, 4, 15, 10, 8, 5, 2,", "= transit(grid, 5, [1, 9, 3, 10, 4], [0.24075568684771534, 0.02527375632067568,", "7) grid = smooth_max(grid, 8, 10, 6) grid = prod(grid,", "2, 7, 4], [0.03047869593495055, 0.024092687676923453, 0.02665655056773558, 0.17667886361751853, 0.15211061797378253, 0.016462544099609754, 0.0072484377164178625,", "10, -3.1151555334821888, 17.571856948335267) grid = prod(grid, 6, [2, 4, 13])", "0.1739322518414499) / 2 grid[:,:,5] = (x * -0.5986715486203882 + y", "transit(grid, 0, [7, 1, 11, 0, 15], [0.036901331671075975, 0.5054281720479712, 0.13288430351514774,", "5, 0, 6, 12, 2, 7, 4], [0.03047869593495055, 0.024092687676923453, 0.02665655056773558,", "sin(grid, 9, 9, -4.261918262131112, 18.680580924548693) grid = smooth_max(grid, 2, 2,", "[0.21908823570589997, 0.1636179110868493, 0.03797238284324163, 0.29532957711092916, 0.2839918932530799]) grid = sin(grid, 4, 3,", "sin(grid, 4, 10, -3.680544885171134, 30.633332441673872) grid = transit(grid, 11, [12,", "** p return test_values(res) #set initial grid grid = np.zeros((SIZE,", "[5, 11, 15, 8, 2, 13, 12, 3, 6], [0.1020239434902293,", "09:21:40 UTC # GAS change date: 2021-11-28 09:20:21 UTC #", "[1.0]) grid = transit(grid, 4, [1, 12, 15, 13, 3],", "0.22944646977966682, 0.6381774306403722]) grid = transit(grid, 6, [15], [1.0]) grid =", "[11], [1.0]) grid = transit(grid, 5, [9, 13, 3, 14],", "8, [11, 7, 4, 12]) grid = transit(grid, 7, [15,", "15]) grid = prod(grid, 8, [11, 7, 4, 12]) grid", "= -x[:,:,s_indx] return test_values(res) 
def smooth_max(x, t_indx, s1_indx, s2_indx, p", "= magnitude(grid, 10, [11, 0, 5], 2) grid = magnitude(grid,", "= smooth_max(grid, 8, 10, 6) grid = prod(grid, 3, [2,", "9, 8, -0.8743741598911887, 15.92872484723533) grid = transit(grid, 4, [3, 13,", "transit(grid, 4, [1, 12, 15, 13, 3], [0.32356965941479515, 0.022696478437764827, 0.2132573540073865,", "[0.05863158300898051, 0.3467981515651057, 0.262107802795733, 0.038001653167336905, 0.2112967596903696, 0.002128256606899112, 0.08103579316557531]) grid = shift(grid,", "transit(grid, 2, [0, 4, 2], [0.010597803396528332, 0.7371576932264431, 0.25224450337702853]) grid =", "= magnitude(grid, 0, [5, 0], 2) grid = transit(grid, 6,", "0.17372789204937897, 0.08036453739500136, 0.09747098994785518, 0.040818441056887325, 0.16796111771248814, 0.07628940657007711]) grid = transit(grid, 3,", "2 * 255).clip(0,255) #save results im = Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) +", "grid = sin(grid, 12, 6, -3.621533174445339, 24.02414911462421) grid = sin(grid,", "alphas, axis = -1) return test_values(res.clip(-1,1)) def sin(x, t_indx, s_indx,", "prod(grid, 8, [11, 7, 4, 12]) grid = transit(grid, 7,", "5, 8.18216846853571, -6.729427492311089) grid = magnitude(grid, 11, [8, 2], 2)", "return arr #define grid transformation methods def transit(x, t_indx, s_indx,", "7, 13, 8], 2) grid = transit(grid, 8, [3, 15,", "-1.842523240371888, 74.23947694195837) grid = inverse(grid, 7, 8) grid = smooth_max(grid,", "* 0.2914526739617249) / 2 grid[:,:,2] = (x * 0.9804797761207309 +", "10, -1.8565532127479274, -54.75186223635349) grid = transit(grid, 10, [14], [1.0]) grid", "12, 0, 4, 14, 3, 5], [0.11084510086381213, 0.003439701966452383, 0.10819642722960272, 0.15371289739415475,", "13, [6, 3, 7]) grid = sin(grid, 0, 3, -3.561651028660104,", "12, 14, 15, 7, 1], [0.20378471182464508, 0.038241020379710625, 0.16903312106740406, 0.3387613981701764, 0.11303295854369695,", "= np.sum(x[:,:,s_indx] * alphas, axis = -1) return 
test_values(res.clip(-1,1)) def", "8, 4.303514875116891, -67.11152580467314) grid = prod(grid, 5, [3, 9, 2])", "6, 1.6821417847846682, -64.12547446801875) grid = sin(grid, 13, 3, -0.15800274281797377, 90.63950889076133)", "smooth_max(grid, 10, 5, 13) grid = sin(grid, 9, 10, -1.8565532127479274,", "sin(grid, 12, 4, -1.6398586072056767, 84.51374680259704) grid = sin(grid, 1, 1,", "5, 7, 14], [0.05801706264076675, 0.341923243761946, 0.0494872820880747, 0.29583940098242745, 0.2547330105267852]) grid =", "grid = prod(grid, 2, [8, 7, 11, 10, 15, 0,", "0.05399039482501285]) grid = transit(grid, 9, [5], [1.0]) grid = transit(grid,", "0.10154488887533689, 12.479110491961137) grid = magnitude(grid, 1, [7], 2) grid =", "y * 0.5388833863473126) / 2 grid[:,:,10] = (x * -0.4262457935185371", "0.4119764535375412, 0.08377067725345017, 0.13045782410775286, 0.02917564277599849, 0.12489006625007311, 0.13398690135296518]) grid = transit(grid, 2,", "0.10854801586669052) grid = shift(grid, 8, 9, 2.766857264282361) grid = transit(grid,", "[1, 9, 3, 10, 4], [0.24075568684771534, 0.02527375632067568, 0.4828116495090197, 0.09546712897709621, 0.15569177834549294])", "8, 7], [0.207462236904601, 0.11516125867317799, 0.12240760599022518, 0.05066197369764289, 0.13869178538077429, 0.09948828746526778, 0.16686217850764798, 0.09926467338066268])", "16 def test_values(arr): if np.isnan(arr).any(): raise Exception('Array has None elements!')", "0.09546712897709621, 0.15569177834549294]) grid = sin(grid, 6, 3, -0.1377650382373763, -96.34412250071645) grid", "15, 8, 2, 13, 12, 3, 6], [0.1020239434902293, 0.05405846145210329, 0.11525379082942891,", "0.038241020379710625, 0.16903312106740406, 0.3387613981701764, 0.11303295854369695, 0.13714679001436697]) grid = transit(grid, 4, [14,", "transit(grid, 1, [1, 14, 8], [0.38986786543390084, 0.40057743619803005, 0.20955469836806906]) grid =", "grid = transit(grid, 3, [15, 11, 2, 8, 0], [0.28772794692354614,", "grid[:,:,11] = (x * 0.8435706697714382 + y * 
0.7746597063144072) /", "6]) grid = transit(grid, 8, [9, 3], [0.30088974760959275, 0.6991102523904072]) grid", "= shift(grid, 6, 1, -1.115193397983063) grid = smooth_max(grid, 13, 3,", "+ y * -0.6817079327248272) / 2 grid[:,:,11] = (x *", "11) grid = transit(grid, 5, [11, 4, 2, 1, 13,", "8, 5, 2, 12], [0.05731677054419865, 0.08527765171582982, 0.33929504571762287, 0.1932983536368378, 0.0036374435750729187, 0.12289545051895708,", "grid = sin(grid, 13, 3, -0.15800274281797377, 90.63950889076133) grid = sin(grid,", "(1/p)) / 1.07 return test_values(res) def smooth_min(x, t_indx, s1_indx, s2_indx,", "# GAS change date: 2021-11-28 09:20:21 UTC # GAS md5", "t_indx, s_indx, scale = 1, shift = 0): res =", "transit(grid, 9, [5], [1.0]) grid = transit(grid, 15, [12, 0,", "grid = power(grid, 10, 5, 0.12539493928522222) grid = power(grid, 0,", "s_indx, p = 1): res = x.copy() res[:,:,t_indx] = np.sign(x[:,:,s_indx])", "(x * 0.8090860808441245 + y * 0.2914526739617249) / 2 grid[:,:,2]", "import Image #PIL version: 8.1.2 #set initial params SIZE =", "res = x.copy() res[:,:,t_indx] = np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx]) ** p", "grid = transit(grid, 8, [2, 11, 15, 4, 1, 0,", "7, 5, 8, 9, 15], [0.085742434722219, 0.4119764535375412, 0.08377067725345017, 0.13045782410775286, 0.02917564277599849,", "grid = prod(grid, 9, [1, 4, 0, 6]) grid =", "/ 2 #apply transformations to the grid grid = transit(grid,", "0.12949271785123345, 0.2591612025511646]) grid = transit(grid, 10, [11, 4, 2, 8,", "grid[:,:,5] = (x * -0.5986715486203882 + y * 0.9515468928881716) /", "grid = transit(grid, 10, [5, 11, 15, 8, 2, 13,", "scale + shift) return test_values(res) def magnitude(x, t_indx, s_indx, ord", "10, 5], [0.5076634403621766, 0.003404332378773421, 0.04142944289977586, 0.4475027843592742]) grid = inverse(grid, 4,", "0.13799633398542827]) grid = transit(grid, 11, [0], [1.0]) grid = magnitude(grid,", "0.33929504571762287, 0.1932983536368378, 0.0036374435750729187, 0.12289545051895708, 
0.19827928429148084]) grid = transit(grid, 8, [13,", "[11, 9, 2], [0.5001532946669459, 0.42070604285213226, 0.07914066248092186]) grid = inverse(grid, 5,", "[12, 15]) grid = prod(grid, 8, [11, 7, 4, 12])", "sin(grid, 10, 2, 0.9155140652310594, -34.1653400637653) grid = transit(grid, 8, [14],", "grid = sin(grid, 9, 9, -4.261918262131112, 18.680580924548693) grid = smooth_max(grid,", "[7, 6, 12, 8, 9, 0, 1], [0.05863158300898051, 0.3467981515651057, 0.262107802795733,", "grid = transit(grid, 7, [11, 9, 2], [0.5001532946669459, 0.42070604285213226, 0.07914066248092186])", "= sin(grid, 12, 13, 3.6938747278005737, 76.37702042567852) grid = magnitude(grid, 15,", "transit(grid, 7, [13], [1.0]) grid = sin(grid, 3, 12, 6.470760426148978,", "5, 12) grid = sin(grid, 10, 2, 0.9155140652310594, -34.1653400637653) grid", "2021-11-28 09:21:40 UTC # GAS change date: 2021-11-28 09:20:21 UTC", "10): res = x.copy() res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx] * p) +", "shift[i])) * 2 - 1 return test_values(res) res = np.zeros((SIZE,", "3, 5, 0.10200689258338674) grid = transit(grid, 2, [10, 11, 4,", "prod(grid, 9, [1, 4, 0, 6]) grid = transit(grid, 8,", "-34.1653400637653) grid = transit(grid, 8, [14], [1.0]) grid = transit(grid,", "sin(grid, 9, 5, -5.606152225672729, -35.928477282758536) grid = transit(grid, 0, [7,", "-0.183401440709518, -88.40242580975152) grid = transit(grid, 12, [3, 13, 2, 9,", "-0.15800274281797377, 90.63950889076133) grid = sin(grid, 14, 14, -1.842523240371888, 74.23947694195837) grid", "= 10): res = x.copy() res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx] * p)", "7, 14], [0.05801706264076675, 0.341923243761946, 0.0494872820880747, 0.29583940098242745, 0.2547330105267852]) grid = inverse(grid,", "3, 12, 6.470760426148978, -53.62090724330151) grid = sin(grid, 10, 10, 0.7827958631857042,", "4, [7, 6, 12, 8, 9, 0, 1], [0.05863158300898051, 0.3467981515651057,", "[2, 0], 2) grid = transit(grid, 13, [6, 2, 3,", "2, 14]) grid = prod(grid, 9, [10, 11, 8, 15,", 
"14, 14, -1.842523240371888, 74.23947694195837) grid = inverse(grid, 7, 8) grid", "11], [0.13835365002720226, 0.008781149737259792, 0.24627334258742545, 0.04870190081124998, 0.049950480577274, 0.15123046752435387, 0.31255198044446264, 0.04415702829077187]) grid", "grid = transit(grid, 2, [7], [1.0]) grid = sin(grid, 10,", "= np.zeros((SIZE, SIZE, GRID_CHANNELS)) x = ((np.arange(SIZE)/(SIZE-1) - 0.5) *", "smooth_max(grid, 13, 3, 8) grid = transit(grid, 13, [13, 0,", "grid = sin(grid, 3, 11, -6.496603906160505, -73.75617586359363) grid = transit(grid,", "transit(grid, 4, [11, 4, 15, 10, 8, 5, 2, 3],", "0.523275926379751, 0.31700196853838186]) grid = sin(grid, 14, 7, 5.409920766787869, -58.09956716630187) grid", "= transit(grid, 2, [1, 2], [0.9078557995211777, 0.09214420047882232]) grid = smooth_max(grid,", "** (1 + shift[i]) - 1) ** (1 / (1", "2, [13, 11, 5], [0.421270391024163, 0.5054038923567993, 0.07332571661903758]) grid = transit(grid,", "4, 15, -1.9527829039221054, 20.537776250912316) grid = transit(grid, 7, [11, 9,", "9, 7, -2.4657577404884132, 72.95418196004374) grid = transit(grid, 12, [7, 4,", "13, 13, 7.718114740496995, 55.242200715207815) grid = sin(grid, 12, 10, -3.1151555334821888,", "= transit(grid, 11, [12], [1.0]) grid = power(grid, 3, 5,", "10, 15, 10) grid = transit(grid, 11, [9, 0, 11,", "4.28026157040775, -75.14180284322572) grid = prod(grid, 3, [14, 15]) grid =", "0.32385711085429764]) grid = transit(grid, 1, [7, 2, 6, 1, 4,", "np.isnan(arr).any(): raise Exception('Array has None elements!') if np.amin(arr) < -1", "[1.0]) grid = transit(grid, 15, [12, 0, 1, 11], [0.01847979792505241,", "57.71107021356826) grid = transit(grid, 2, [12], [1.0]) grid = prod(grid,", "[1.0]) #create color space def shift_colors(x, shift): res = x.copy()", "= Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) + '.png') #save layers img = np.zeros((SIZE", "15, -1.9527829039221054, 20.537776250912316) grid = transit(grid, 7, [11, 9, 2],", "* 
0.5049774961793401 + y * 0.05113255120007798) / 2 grid[:,:,8] =", "1, 13, 5, 0, 7, 8, 9, 12, 6, 11,", "smooth_max(grid, 1, 15, 12) grid = prod(grid, 11, [3]) grid", "74.23947694195837) grid = inverse(grid, 7, 8) grid = smooth_max(grid, 10,", "grid = sin(grid, 4, 4, 3.47544933993972, -37.11795195118333) grid = sin(grid,", "7, 3, 1.6405444007982959, -37.09230830685477) grid = transit(grid, 9, [8], [1.0])", "j // 4 img[x*SIZE:(x + 1)*SIZE, y*SIZE:(y+1)*SIZE] = grid[:,:,j] img", "methods def transit(x, t_indx, s_indx, alphas): res = x.copy() res[:,:,t_indx]", "2, -2.2972705471452146, -12.522748365129786) grid = smooth_min(grid, 12, 9, 11) grid", "grid = transit(grid, 8, [3, 15, 9, 6, 11], [0.036102265915692405,", "transit(grid, 2, [1, 2], [0.9078557995211777, 0.09214420047882232]) grid = smooth_max(grid, 1,", "transit(grid, 10, [5, 11, 15, 8, 2, 13, 12, 3,", "= inverse(grid, 5, 5) grid = transit(grid, 4, [8, 4,", "10, 5, 13) grid = sin(grid, 9, 10, -1.8565532127479274, -54.75186223635349)", "14, 3, 5], [0.11084510086381213, 0.003439701966452383, 0.10819642722960272, 0.15371289739415475, 0.25812192912399506, 0.005727171643985687, 0.14633649245899077,", "0.03284446726347166, 0.04732779189481446, 0.13963294227934445]) grid = smooth_min(grid, 0, 13, 15) grid", "2, 9], 2) grid = sin(grid, 9, 5, -5.606152225672729, -35.928477282758536)", "1, 12, -0.5111321725063378, 18.261359970959475) grid = power(grid, 6, 5, 0.9223892145169746)", "1.2844464834351186, -45.836492724169695) grid = sin(grid, 1, 2, -1.5301674594368837, -60.29431568717391) grid", "* -0.6817079327248272) / 2 grid[:,:,11] = (x * 0.8435706697714382 +", "axis = -1) return test_values(res.clip(-1,1)) def sin(x, t_indx, s_indx, scale", "def smooth_min(x, t_indx, s1_indx, s2_indx, p = 10): res =", "[4, 14, 0, 13], [0.2785496566747933, 0.004915230889640017, 0.30146401859790545, 0.4150710938376613]) grid =", "[0.13643904772292245, 0.38438336340747, 0.15936221296996333, 0.31981537589964426]) grid = sin(grid, 10, 3, 
-2.5681840787633137,", "= prod(grid, 5, [3, 9, 2]) grid = sin(grid, 5,", "0.3010385316537353, 0.07412413198773361, 0.14949729304492473]) grid = magnitude(grid, 10, [11, 0, 5],", "SIZE, GRID_CHANNELS)) x = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((1, SIZE)).repeat(SIZE,", "[13, 10, 12, 2, 11, 14], 2) grid = transit(grid,", "-64.12547446801875) grid = sin(grid, 13, 3, -0.15800274281797377, 90.63950889076133) grid =", "= transit(grid, 11, [7, 2, 3, 9, 5], [0.24039798004748805, 0.2886075990223525,", "[7], 2) grid = transit(grid, 6, [9, 11, 2, 13],", "transit(grid, 1, [7, 2, 6, 1, 4, 0], [0.2070905138265326, 0.06562120796792839,", "10, -3.680544885171134, 30.633332441673872) grid = transit(grid, 11, [12, 6, 9],", "magnitude(grid, 8, [10, 9, 12, 4, 7, 15], 2) grid", "7, [13], [1.0]) grid = sin(grid, 3, 12, 6.470760426148978, -53.62090724330151)", "= x.copy() res[:,:,t_indx] = np.sum(x[:,:,s_indx] * alphas, axis = -1)", "[7, 15, 5], 2) grid = magnitude(grid, 9, [12, 14,", "v.1900 64 bit (AMD64)] # For more information visit: https://github.com/volotat/GAS", "8.1.2 #set initial params SIZE = 768 GRID_CHANNELS = 16", "-1) return test_values(res.clip(-1,1)) def sin(x, t_indx, s_indx, scale = 1,", "0.12962618248625338, 0.03090992138168193, 0.016043163973997736, 0.13259375374543056, 0.09920705802758992, 0.1415090600653345, 0.09597789664069131, 0.06106766497801195, 0.14032187015082653, 0.008288053054498123])", "grid = transit(grid, 11, [9, 0, 11, 7, 3, 8],", "transit(grid, 4, [8, 4, 15, 9, 10], [0.10267794314653868, 0.019022820046952493, 0.061606568183823145,", "8, 4) grid = transit(grid, 10, [1], [1.0]) grid =", "70.84834564082374) grid = transit(grid, 2, [11, 7, 13], [0.3629247592109436, 0.10073172896374764,", "[0.036102265915692405, 0.1224495166624379, 0.2384660328868578, 0.3357862916746864, 0.2671958928603256]) grid = smooth_min(grid, 1, 1,", "12, 2, 7, 4], [0.03047869593495055, 0.024092687676923453, 0.02665655056773558, 0.17667886361751853, 0.15211061797378253, 
0.016462544099609754,", "= sin(grid, 9, 10, -1.8565532127479274, -54.75186223635349) grid = transit(grid, 10,", "initial params SIZE = 768 GRID_CHANNELS = 16 def test_values(arr):", "+ shift) - 1) ** (1 / (1 + shift))", "transit(grid, 1, [3], [1.0]) grid = magnitude(grid, 14, [4], 2)", "grid = sin(grid, 4, 9, 0.2366252211469413, -40.63773874328931) grid = sin(grid,", "[11, 4, 2, 8, 14], [0.3705316303566195, 0.1755951969700656, 0.043989590834687294, 0.22866693087969006, 0.1812166509589377])", "= magnitude(grid, 5, [7], 2) grid = transit(grid, 6, [9,", "power(x, t_indx, s_indx, p = 1): res = x.copy() res[:,:,t_indx]", "0.49901982570530873, 0.2682295923869092]) grid = magnitude(grid, 8, [10, 9, 12, 4,", "8, [3, 15, 9, 6, 11], [0.036102265915692405, 0.1224495166624379, 0.2384660328868578, 0.3357862916746864,", "15, 4.115946851379848) grid = transit(grid, 15, [13, 3], [0.5897775709748927, 0.41022242902510725])", "12, 0) grid = transit(grid, 2, [1, 2], [0.9078557995211777, 0.09214420047882232])", "change date: 2021-11-28 09:20:21 UTC # GAS md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad", "grid = inverse(grid, 11, 5) grid = magnitude(grid, 14, [4,", "7, 8, 9, 12, 6, 11, 14], [0.10006330804326793, 0.03891760159161208, 0.005474465860804227,", "grid = transit(grid, 2, [1, 7], [0.18247956114317448, 0.8175204388568255]) grid =", "2, 3], [0.23701292672659616, 0.08316792464084911, 0.017867439461611043, 0.36417402420248035, 0.02841485585755143, 0.19916101840344472, 0.03422984110049058, 0.03597196960697647])", "grid = transit(grid, 15, [12, 0, 1, 11], [0.01847979792505241, 0.33442336387003857,", "= (x * 0.5049774961793401 + y * 0.05113255120007798) / 2", "grid = shift(grid, 6, 15, 4.115946851379848) grid = transit(grid, 15,", "0.08316792464084911, 0.017867439461611043, 0.36417402420248035, 0.02841485585755143, 0.19916101840344472, 0.03422984110049058, 0.03597196960697647]) grid = magnitude(grid,", "* np.abs(x[:,:,s_indx]) ** p return test_values(res) #set initial grid grid", 
"smooth_max(grid, 5, 8, 4) grid = transit(grid, 10, [1], [1.0])", "transit(grid, 10, [1], [1.0]) grid = transit(grid, 15, [15], [1.0])", "13, 3], [0.32356965941479515, 0.022696478437764827, 0.2132573540073865, 0.11957266769813353, 0.3209038404419199]) grid = transit(grid,", "= sin(grid, 1, 1, -0.183401440709518, -88.40242580975152) grid = transit(grid, 12,", "prod(grid, 11, [3]) grid = smooth_max(grid, 8, 11, 15) grid", "0.42070604285213226, 0.07914066248092186]) grid = inverse(grid, 5, 12) grid = sin(grid,", "-90.82177259964699) grid = transit(grid, 6, [8, 6, 5, 7, 4,", "np.amax(arr)) ) return arr #define grid transformation methods def transit(x,", "[7, 1, 11, 0, 15], [0.036901331671075975, 0.5054281720479712, 0.13288430351514774, 0.10820806749406277, 0.21657812527174225])", "2) ** (1 - shift) - 1) ** (1 /", "[7, 11, 15, 8, 12, 0, 4, 14, 3, 5],", "0.038001653167336905, 0.2112967596903696, 0.002128256606899112, 0.08103579316557531]) grid = shift(grid, 3, 3, 2.4622222565241207)", "8) grid = transit(grid, 13, [13, 0, 5, 14], [0.09662806703796267,", "magnitude(grid, 10, [7, 15, 5], 2) grid = magnitude(grid, 9,", "0.20955469836806906]) grid = transit(grid, 9, [5], [1.0]) grid = shift(grid,", "7, 3, 8], [0.03500911832175082, 0.03265868671024263, 0.3248025339288217, 0.4234363710484886, 0.13338109758306646, 0.050712192407629864]) grid", "2, 12], [0.05731677054419865, 0.08527765171582982, 0.33929504571762287, 0.1932983536368378, 0.0036374435750729187, 0.12289545051895708, 0.19827928429148084]) grid", "grid = transit(grid, 1, [8, 10, 15, 14, 9], [0.33493798319460544,", "inverse(grid, 7, 8) grid = smooth_max(grid, 10, 3, 15) grid", "= sin(grid, 1, 7, 1.6265187305620117, -97.13150019385894) grid = transit(grid, 11,", "[1, 7], [0.18247956114317448, 0.8175204388568255]) grid = transit(grid, 8, [11, 15,", "= transit(grid, 0, [14, 3, 11, 10, 7], [0.5203714128788618, 0.068511863728177,", "3, 7]) grid = sin(grid, 0, 3, -3.561651028660104, 11.539889679902203) grid", "- 1 return 
test_values(res) def inverse(x, t_indx, s_indx): res =", "-0.4262457935185371 + y * -0.6817079327248272) / 2 grid[:,:,11] = (x", "[0.06904450551777742, 0.12680650314665426, 0.1756104206123629, 0.013987480750913602, 0.1337935702206657, 0.39097327478734406, 0.08978424496428203]) grid = smooth_min(grid,", "7, 12, 0) grid = transit(grid, 2, [1, 2], [0.9078557995211777,", "5], [0.421270391024163, 0.5054038923567993, 0.07332571661903758]) grid = transit(grid, 11, [1, 15,", "[14, 15]) grid = inverse(grid, 5, 5) grid = transit(grid,", "[11, 10], [0.9817011300708863, 0.018298869929113594]) grid = sin(grid, 14, 8, -0.4693746108213766,", "11, 10], [0.2662646690994658, 0.2460545507972383, 0.4876807801032959]) grid = transit(grid, 2, [7],", "[0.20381942291270427, 0.07753380798970702, 0.11445683149439734, 0.08475226158626031, 0.1416941580568898, 0.020968563089492034, 0.0847896752697893, 0.0921589665387646, 0.008240731277180186, 0.17158558178481512])", "+ 1) * 127.5 im = Image.fromarray(np.uint8(img)) im.save(os.path.basename(__file__) + '_layers.png')", "sin(grid, 5, 1, 2.0751861425380627, 63.37681521624819) grid = smooth_min(grid, 11, 10,", "64 bit (AMD64)] # For more information visit: https://github.com/volotat/GAS #import", "went to far! 
[ %.2f : %.2f ]'%(np.amin(arr), np.amax(arr)) )", "[0.6926745567135898, 0.1831142410590532, 0.12421120222735695]) grid = magnitude(grid, 7, [6, 12, 7,", "0.0028158122366541832, 0.39271313845362693]) grid = magnitude(grid, 11, [13, 10, 12, 2,", "10, 4], [0.24075568684771534, 0.02527375632067568, 0.4828116495090197, 0.09546712897709621, 0.15569177834549294]) grid = sin(grid,", "0.1831142410590532, 0.12421120222735695]) grid = magnitude(grid, 7, [6, 12, 7, 13,", "0.05405846145210329, 0.11525379082942891, 0.11556721863292163, 0.12372657123165616, 0.1356897031789931, 0.20047556686480725, 0.09921434949484752, 0.05399039482501285]) grid =", "= prod(grid, 13, [6, 3, 7]) grid = sin(grid, 0,", "3], [0.23701292672659616, 0.08316792464084911, 0.017867439461611043, 0.36417402420248035, 0.02841485585755143, 0.19916101840344472, 0.03422984110049058, 0.03597196960697647]) grid", "grid = sin(grid, 4, 3, 0.10154488887533689, 12.479110491961137) grid = magnitude(grid,", "4, 2], [0.010597803396528332, 0.7371576932264431, 0.25224450337702853]) grid = sin(grid, 11, 8,", "0.06562120796792839, 0.17355051228662716, 0.05514926535269553, 0.0829726599151083, 0.41561584065110807]) grid = transit(grid, 2, [0,", "[0.530662002197574, 0.1082014600047566, 0.36113653779766947]) grid = transit(grid, 14, [10, 14, 4,", "7], 2) grid = transit(grid, 4, [4, 12, 14, 15,", "2) grid = shift(grid, 3, 9, 3.0393348894939773) grid = shift(grid,", "= transit(grid, 8, [5, 4, 15, 6, 14, 0, 3,", "2, [8, 7, 11, 10, 15, 0, 5]) grid =", "0.10073172896374764, 0.5363435118253088]) grid = sin(grid, 1, 5, 0.6814927249849106, 30.75954926767548) grid", "* -0.3867357840809138) / 2 grid[:,:,15] = (x * 0.49037959172682255 +", "sin(grid, 14, 7, 5.409920766787869, -58.09956716630187) grid = sin(grid, 2, 15,", "* p) + np.exp(x[:,:,s2_indx] * p)) ** (1/p)) / 1.07", "grid = sin(grid, 12, 10, -3.1151555334821888, 17.571856948335267) grid = prod(grid,", "0.4475027843592742]) grid = inverse(grid, 4, 5) grid = transit(grid, 1,", "grid = 
sin(grid, 8, 2, 3.501615294498545, -75.50049353340206) grid = prod(grid,", "0): res = x.copy() res[:,:,t_indx] = np.sin(x[:,:,s_indx] * 0.5 *", "1) * 2 - 1 if shift[i] < 0: res[:,:,i]", "* 0.05113255120007798) / 2 grid[:,:,8] = (x * -0.3391983246964396 +", "res[:,:,t_indx] = np.prod(x[:,:,s_indx], -1) return test_values(res) def power(x, t_indx, s_indx,", "[0.23275058190778222, 0.49901982570530873, 0.2682295923869092]) grid = magnitude(grid, 8, [10, 9, 12,", "6.219381309190064, -71.03631884776823) grid = sin(grid, 9, 6, 1.6821417847846682, -64.12547446801875) grid", "res[:,:,t_indx] = np.sum(x[:,:,s_indx] * alphas, axis = -1) return test_values(res.clip(-1,1))", "grid = transit(grid, 4, [3], [1.0]) grid = sin(grid, 3,", "3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit", "0, 6, 12, 2, 7, 4], [0.03047869593495055, 0.024092687676923453, 0.02665655056773558, 0.17667886361751853,", "grid = sin(grid, 10, 4, 1.2844464834351186, -45.836492724169695) grid = sin(grid,", "= transit(grid, 10, [14], [1.0]) grid = transit(grid, 15, [11,", "grid = prod(grid, 14, [13]) grid = sin(grid, 1, 12,", "= sin(grid, 9, 7, -2.4657577404884132, 72.95418196004374) grid = transit(grid, 12,", "smooth_min(grid, 11, 10, 9) grid = sin(grid, 13, 2, 4.295107938126156,", "0.5162695719847235, 0.25678193487542517]) grid = sin(grid, 9, 9, -4.261918262131112, 18.680580924548693) grid", "[8, 2], 2) grid = transit(grid, 7, [12, 11, 13,", "sin(grid, 4, 9, 0.2366252211469413, -40.63773874328931) grid = sin(grid, 9, 15,", "= sin(grid, 12, 10, -3.1151555334821888, 17.571856948335267) grid = prod(grid, 6,", "13, 1, 11, 3, 8, 7], [0.207462236904601, 0.11516125867317799, 0.12240760599022518, 0.05066197369764289,", "sin(grid, 4, 15, -1.9527829039221054, 20.537776250912316) grid = transit(grid, 7, [11,", "sin(grid, 11, 8, 4.303514875116891, -67.11152580467314) grid = prod(grid, 5, [3,", "y * 0.9515468928881716) / 2 grid[:,:,6] = (x * 0.2265055481768512", "4, 9, 1, 0, 14], 
[0.36336761526831185, 0.17372789204937897, 0.08036453739500136, 0.09747098994785518, 0.040818441056887325,", "res[:,:,t_indx] = np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx]) ** p return test_values(res) #set", "= transit(grid, 3, [11, 1, 12, 9, 0, 8, 15,", "11, 13], [0.03597236183123865, 0.04938629068404894, 0.08457069101219464, 0.014801187461296406, 0.3649334871683411, 0.28062233683539095, 0.08637063851194285, 0.06076815802338077,", "3, 11, 0, 14], 2) grid = sin(grid, 4, 5,", "4.295107938126156, 57.378601701270014) grid = sin(grid, 10, 2, -0.010214061334835559, 20.43114218394348) grid", "= smooth_max(grid, 2, 2, 11) grid = sin(grid, 13, 13,", "0.16834508849259286, 0.14540219911263502, 0.094441440303033]) grid = transit(grid, 11, [12], [1.0]) grid", "[0.6569516962992897, 0.3430483037007103]) grid = sin(grid, 14, 5, 0.053526366336325744, 4.147364704932215) grid", "6, -3.621533174445339, 24.02414911462421) grid = sin(grid, 1, 11, 0.5071121900678415, 10.950101187785563)", "* 0.8435706697714382 + y * 0.7746597063144072) / 2 grid[:,:,12] =", "0, 6]) grid = transit(grid, 8, [9, 3], [0.30088974760959275, 0.6991102523904072])", "9, 6, 11], [0.036102265915692405, 0.1224495166624379, 0.2384660328868578, 0.3357862916746864, 0.2671958928603256]) grid =", "3.7705302330112063, 56.91558505626969) grid = sin(grid, 3, 9, 1.4275963527158242, -76.78247379244436) grid", "1.6821417847846682, -64.12547446801875) grid = sin(grid, 13, 3, -0.15800274281797377, 90.63950889076133) grid", "-12.522748365129786) grid = smooth_min(grid, 12, 9, 11) grid = sin(grid,", "shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669, 1.4677093499726706, 1.2451388311186942]) res = res / 1", "0.5112825397666086, 37.95950546335726) grid = sin(grid, 12, 13, 3.6938747278005737, 76.37702042567852) grid", "* 0.49037959172682255 + y * -0.7671554143072785) / 2 #apply transformations", "15, [12, 0, 1, 11], [0.01847979792505241, 0.33442336387003857, 0.15192425697494277, 0.4951725812299663]) grid", "[5], [1.0]) 
grid = transit(grid, 15, [12, 0, 1, 11],", "4], [0.24075568684771534, 0.02527375632067568, 0.4828116495090197, 0.09546712897709621, 0.15569177834549294]) grid = sin(grid, 6,", "3, 15, 4, 2, 11, 13], [0.03597236183123865, 0.04938629068404894, 0.08457069101219464, 0.014801187461296406,", "grid = sin(grid, 5, 1, 2.0751861425380627, 63.37681521624819) grid = smooth_min(grid,", "= sin(grid, 9, 6, 1.6821417847846682, -64.12547446801875) grid = sin(grid, 13,", "3, [9], [1.0]) grid = transit(grid, 11, [2], [1.0]) #create", "10, [11, 0, 5], 2) grid = magnitude(grid, 9, [15,", "[8, 10, 15, 14, 9], [0.33493798319460544, 0.14040206011900094, 0.3010385316537353, 0.07412413198773361, 0.14949729304492473])", "-0.5303146721156469 + y * -0.41048419195488317) / 2 grid[:,:,13] = (x", "4, [11, 4, 15, 10, 8, 5, 2, 3], [0.23701292672659616,", "shift): res = x.copy() for i in range(x.shape[-1]): if shift[i]", "3.6427863324838423, 99.297524709649) grid = sin(grid, 5, 14, -1.45141083652418, -99.85812912291547) grid", "[12, 8, 10, 4, 2], [0.43102537693091664, 0.25433300797798253, 0.21618454566402304, 0.046743011673522995, 0.05171405775355483])", "magnitude(grid, 9, [15, 3, 11, 0, 14], 2) grid =", "2 grid[:,:,7] = (x * 0.5049774961793401 + y * 0.05113255120007798)", "= transit(grid, 2, [9, 11, 10], [0.2662646690994658, 0.2460545507972383, 0.4876807801032959]) grid", "= sin(grid, 13, 2, 4.295107938126156, 57.378601701270014) grid = sin(grid, 10,", "grid = magnitude(grid, 10, [7, 15, 5], 2) grid =", "0.19859698568682838, 0.4891861295353413]) grid = transit(grid, 13, [12, 15, 9, 2,", "grid = transit(grid, 0, [7, 11, 15, 8, 12, 0,", "grid = transit(grid, 8, [13, 9, 5, 7, 14], [0.05801706264076675,", "0.12421120222735695]) grid = magnitude(grid, 7, [6, 12, 7, 13, 8],", "= sin(grid, 10, 14, 0.8649185298731181, 3.1973516320924773) grid = sin(grid, 9,", "inverse(grid, 8, 5) grid = smooth_max(grid, 10, 5, 13) grid", "0, [0, 1, 2, 14]) grid = prod(grid, 9, [10,", "4, 5, -1.8457292172108153, 
-53.43885199947502) grid = sin(grid, 10, 0, 7.741409383532979,", "sin(grid, 1, 1, -0.183401440709518, -88.40242580975152) grid = transit(grid, 12, [3,", "-1.9527829039221054, 20.537776250912316) grid = transit(grid, 7, [11, 9, 2], [0.5001532946669459,", "(1/p)) / 1.07 return test_values(res) def prod(x, t_indx, s_indx): res", "numpy as np #Numpy version: 1.19.5 from PIL import Image", "0.04142944289977586, 0.4475027843592742]) grid = inverse(grid, 4, 5) grid = transit(grid,", "np.exp(x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return test_values(res) def", "grid[:,:,j] img = (img + 1) * 127.5 im =", "2], [0.5001532946669459, 0.42070604285213226, 0.07914066248092186]) grid = inverse(grid, 5, 12) grid", "power(grid, 14, 0, 0.10854801586669052) grid = shift(grid, 8, 9, 2.766857264282361)", "2) ** (1 + shift[i]) - 1) ** (1 /", "5) grid = transit(grid, 1, [4, 14, 0, 13], [0.2785496566747933,", "grid = transit(grid, 11, [5, 10, 7], [0.22694849313985146, 0.5162695719847235, 0.25678193487542517])", "/ 2) ** (1 + shift[i]) - 1) ** (1", "18.261359970959475) grid = power(grid, 6, 5, 0.9223892145169746) grid = transit(grid,", "9, 8, 5, 2, 12], [0.05731677054419865, 0.08527765171582982, 0.33929504571762287, 0.1932983536368378, 0.0036374435750729187,", "[0.9817011300708863, 0.018298869929113594]) grid = sin(grid, 14, 8, -0.4693746108213766, -98.17810769380118) grid", "s2_indx, p = 10): res = x.copy() res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx]", "0.1314519110369097]) grid = transit(grid, 8, [6, 2], [0.6857167761482571, 0.31428322385174284]) grid", "-21.508000199215132) grid = shift(grid, 11, 5, 1.0526879494498724) grid = transit(grid,", "10, 12, 2, 11, 14], 2) grid = transit(grid, 12,", "11, [5, 10, 7], [0.22694849313985146, 0.5162695719847235, 0.25678193487542517]) grid = sin(grid,", "0], [0.08195235243098883, 0.6796005904358621, 0.23844705713314918]) grid = power(grid, 14, 0, 0.10854801586669052)", "[0.16813621041531998, 0.42150135317124293, 0.410362436413437]) grid = 
inverse(grid, 6, 6) grid =", "transit(grid, 10, [9, 8], [0.7777441717493406, 0.22225582825065934]) grid = transit(grid, 3,", "10]) grid = transit(grid, 2, [0, 15, 10], [0.005204838856346087, 0.5116602651328436,", "= transit(grid, 4, [14, 11, 12, 13, 4, 7], [0.23221079251346607,", "14, [11, 10]) grid = transit(grid, 2, [0, 15, 10],", "0.09921434949484752, 0.05399039482501285]) grid = transit(grid, 9, [5], [1.0]) grid =", "transit(grid, 8, [3], [1.0]) grid = inverse(grid, 8, 5) grid", "13, 11, 2, 9], 2) grid = sin(grid, 9, 5,", "7, 1], [0.20378471182464508, 0.038241020379710625, 0.16903312106740406, 0.3387613981701764, 0.11303295854369695, 0.13714679001436697]) grid =", "shift(x, t_indx, s_indx, shift): res = x.copy() if shift >", "sin(x, t_indx, s_indx, scale = 1, shift = 0): res", "12, 6, -3.621533174445339, 24.02414911462421) grid = sin(grid, 1, 11, 0.5071121900678415,", "0, 2, 4, 8, 5, 6, 7]) grid = transit(grid,", "y * -0.5135707069423852) / 2 grid[:,:,9] = (x * -0.4075423366723827", "= transit(grid, 9, [5], [1.0]) grid = shift(grid, 9, 13,", "shift_colors(x, shift): res = x.copy() for i in range(x.shape[-1]): if", "[6, 15, 11, 9, 12], [0.21908823570589997, 0.1636179110868493, 0.03797238284324163, 0.29532957711092916, 0.2839918932530799])", "0.24583958083442445, 0.2244518823086713, 0.13799633398542827]) grid = transit(grid, 11, [0], [1.0]) grid", "5, 10, -1.5052434957207308, 24.900059771988836) grid = sin(grid, 8, 10, 2.5947698108630664,", "* -0.5155435342135386) / 2 grid[:,:,4] = (x * -0.6644350461377522 +", "8, 9, 12, 2], [0.031587088727564654, 0.024264739611302585, 0.0306940545567164, 0.19611241111174804, 0.7173417059926683]) grid", "transit(grid, 0, [12, 6, 4, 9, 1, 0, 14], [0.36336761526831185,", "0.06226802479442838, 0.0028158122366541832, 0.39271313845362693]) grid = magnitude(grid, 11, [13, 10, 12,", "[9, 11, 2, 13], [0.381505247910628, 0.12073241493361198, 0.3454992433435407, 0.15226309381221942]) grid =", "11, 2, 9], 2) grid = sin(grid, 9, 5, 
-5.606152225672729,", "0.5) * 2).reshape((SIZE, 1)).repeat(SIZE, 1) grid[:,:,0] = (x * 0.9386329219527516", "2], 2) grid = transit(grid, 7, [12, 11, 13, 4],", "grid = sin(grid, 2, 15, -2.5319898824657017, -45.01904701883333) grid = shift(grid,", "grid = sin(grid, 1, 7, 1.6265187305620117, -97.13150019385894) grid = transit(grid,", "[0.3629247592109436, 0.10073172896374764, 0.5363435118253088]) grid = sin(grid, 1, 5, 0.6814927249849106, 30.75954926767548)", "3, 9, 1.4275963527158242, -76.78247379244436) grid = sin(grid, 2, 5, -5.225820110717917,", "%.2f : %.2f ]'%(np.amin(arr), np.amax(arr)) ) return arr #define grid", "(x * -0.4075423366723827 + y * 0.5388833863473126) / 2 grid[:,:,10]", "grid = transit(grid, 15, [13, 3], [0.5897775709748927, 0.41022242902510725]) grid =", "= transit(grid, 8, [13, 9, 5, 7, 14], [0.05801706264076675, 0.341923243761946,", "grid = magnitude(grid, 14, [4], 2) grid = sin(grid, 1,", "shift[i] > 0: res[:,:,i] = (-np.abs(((x[:,:,i] + 1) / 2)", "24.02414911462421) grid = sin(grid, 1, 11, 0.5071121900678415, 10.950101187785563) grid =", "np.log((np.exp(x[:,:,s1_indx] * p) + np.exp(x[:,:,s2_indx] * p)) ** (1/p)) /", "12, 8, 9, 0, 1], [0.05863158300898051, 0.3467981515651057, 0.262107802795733, 0.038001653167336905, 0.2112967596903696,", "2, [0, 15, 10], [0.005204838856346087, 0.5116602651328436, 0.48313489601081044]) grid = transit(grid,", "def transit(x, t_indx, s_indx, alphas): res = x.copy() res[:,:,t_indx] =", "6, 10, 7, 4]) grid = smooth_min(grid, 7, 12, 0)", "9, 1, 0, 14], [0.36336761526831185, 0.17372789204937897, 0.08036453739500136, 0.09747098994785518, 0.040818441056887325, 0.16796111771248814,", "0.16796111771248814, 0.07628940657007711]) grid = transit(grid, 3, [11, 1, 12, 9,", "= transit(grid, 7, [14, 2, 13, 1, 11, 3, 8,", "[0.5203714128788618, 0.068511863728177, 0.10141059844877331, 0.2728285912351676, 0.036877533709020166]) grid = transit(grid, 7, [11],", "[0.381505247910628, 0.12073241493361198, 0.3454992433435407, 
0.15226309381221942]) grid = magnitude(grid, 10, [7, 15,", "transit(grid, 11, [0], [1.0]) grid = magnitude(grid, 0, [4, 13],", "+ y * -0.41048419195488317) / 2 grid[:,:,13] = (x *", "5, 0.10200689258338674) grid = transit(grid, 2, [10, 11, 4, 15,", "grid = transit(grid, 7, [15, 6, 2, 7], [0.45073658968521574, 0.16060948991238613,", "grid = transit(grid, 5, [1, 9, 3, 10, 4], [0.24075568684771534,", "= ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((SIZE, 1)).repeat(SIZE, 1) grid[:,:,0] =", "4]) grid = smooth_min(grid, 7, 12, 0) grid = transit(grid,", "9, 6.219381309190064, -71.03631884776823) grid = sin(grid, 9, 6, 1.6821417847846682, -64.12547446801875)", "transit(grid, 6, [8, 6, 5, 7, 4, 2], [0.39579476392315127, 0.3200094081197146,", "shift)) * 2 - 1 return test_values(res) def inverse(x, t_indx,", "= np.linalg.norm(x[:,:,s_indx], axis = -1, ord = ord) / np.sqrt(len(s_indx))", "= transit(grid, 1, [14], [1.0]) grid = transit(grid, 8, [9,", "3, 8, 7], [0.207462236904601, 0.11516125867317799, 0.12240760599022518, 0.05066197369764289, 0.13869178538077429, 0.09948828746526778, 0.16686217850764798,", "12, 7, 13, 8], 2) grid = transit(grid, 8, [3,", "sin(grid, 7, 15, -4.9164570678736865, 86.15931416043557) grid = sin(grid, 1, 7,", "7, 4]) grid = smooth_min(grid, 7, 12, 0) grid =", "grid = smooth_min(grid, 12, 9, 11) grid = sin(grid, 4,", "0.2244518823086713, 0.13799633398542827]) grid = transit(grid, 11, [0], [1.0]) grid =", "res[:,:,t_indx] = -x[:,:,s_indx] return test_values(res) def smooth_max(x, t_indx, s1_indx, s2_indx,", "3, 14], [0.28064413535886806, 0.5181512474389621, 0.1504742947642479, 0.050730322437922]) grid = prod(grid, 1,", "Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)] #", "1 return test_values(res) res = np.zeros((SIZE, SIZE, 3)) res +=", "[8], 2) grid = transit(grid, 13, [15, 5, 9, 4,", "res[:,:,t_indx] = np.sin(x[:,:,s_indx] * 0.5 * np.pi * scale +", "(x [:,:,i]+ 1) / 2) ** (1 - shift[i]) -", "grid = sin(grid, 11, 7, -0.3409112713023047, 
75.93313567333723) grid = transit(grid,", "shift > 0: res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx] + 1) / 2)", "grid = sin(grid, 12, 13, 3.6938747278005737, 76.37702042567852) grid = magnitude(grid,", "2, 5, -5.225820110717917, 57.71107021356826) grid = transit(grid, 2, [12], [1.0])", "2], [0.9078557995211777, 0.09214420047882232]) grid = smooth_max(grid, 1, 0, 1) grid", "10, [5, 2]) grid = transit(grid, 15, [0, 3], [0.29345909580747953,", "grid = sin(grid, 9, 8, -0.8743741598911887, 15.92872484723533) grid = transit(grid,", "15], [0.530662002197574, 0.1082014600047566, 0.36113653779766947]) grid = transit(grid, 14, [10, 14,", "= ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((1, SIZE)).repeat(SIZE, 0) y =", "15, 9, 2, 0, 1, 5], [0.18796556626817826, 0.19260744772691155, 0.11226112831146452, 0.08161640805634696,", "transit(grid, 2, [1, 7], [0.18247956114317448, 0.8175204388568255]) grid = transit(grid, 8,", "= sin(grid, 4, 2, -3.329894296119046, -76.41676919069447) grid = smooth_min(grid, 11,", "0.3387613981701764, 0.11303295854369695, 0.13714679001436697]) grid = transit(grid, 4, [14, 11, 12,", "= np.prod(x[:,:,s_indx], -1) return test_values(res) def power(x, t_indx, s_indx, p", "3, 11, 10, 7], [0.5203714128788618, 0.068511863728177, 0.10141059844877331, 0.2728285912351676, 0.036877533709020166]) grid", "[1.0]) grid = power(grid, 3, 5, 0.10200689258338674) grid = transit(grid,", "2) grid = transit(grid, 7, [12, 11, 13, 4], [0.1713900685471786,", "1) / 2) ** (1 - shift) - 1) **", "#set initial grid grid = np.zeros((SIZE, SIZE, GRID_CHANNELS)) x =", "2, 1, 13, 12, 0, 8], [0.08486049729383285, 0.15069099224942706, 0.024923245737924458, 0.07191051851248272,", "[0.7777441717493406, 0.22225582825065934]) grid = transit(grid, 3, [9], [1.0]) grid =", "15, 13, 3], [0.32356965941479515, 0.022696478437764827, 0.2132573540073865, 0.11957266769813353, 0.3209038404419199]) grid =", "test_values(res) #set initial grid grid = np.zeros((SIZE, SIZE, GRID_CHANNELS)) x", "15]) grid = 
inverse(grid, 5, 5) grid = transit(grid, 4,", "sin(grid, 10, 14, 0.8649185298731181, 3.1973516320924773) grid = sin(grid, 9, 7,", "grid = smooth_max(grid, 13, 3, 8) grid = transit(grid, 13,", "np.abs((1 - (x [:,:,i]+ 1) / 2) ** (1 -", "= prod(grid, 8, [11, 7, 4, 12]) grid = transit(grid,", "11, 1]) grid = smooth_min(grid, 3, 2, 7) grid =", "grid = transit(grid, 12, [3, 13, 2, 9, 0], [0.24803411847529433,", "2, 2, 11) grid = sin(grid, 13, 13, 7.718114740496995, 55.242200715207815)", "10, 0.9558311639914843, -47.618914508652054) grid = shift(grid, 9, 8, -1.1449289879251126) grid", "grid[:,:,0] = (x * 0.9386329219527516 + y * -0.45147169454413794) /", "8, 12, 0, 4, 14, 3, 5], [0.11084510086381213, 0.003439701966452383, 0.10819642722960272,", "3.6938747278005737, 76.37702042567852) grid = magnitude(grid, 15, [5, 3, 8, 0,", "- 1) ** (1 / (1 - shift)) * 2", "9, 12, 4, 7, 15], 2) grid = sin(grid, 12,", "14, [4, 6, 1, 0], 2) grid = transit(grid, 13,", "2) grid = magnitude(grid, 9, [12, 14, 4], 2) grid", "0.25433300797798253, 0.21618454566402304, 0.046743011673522995, 0.05171405775355483]) grid = sin(grid, 10, 10, 0.9558311639914843,", "-1), [1.9355805467383669, 1.4677093499726706, 1.2451388311186942]) res = res / 1 res", "2, -0.010214061334835559, 20.43114218394348) grid = transit(grid, 8, [1], [1.0]) grid", "= transit(grid, 13, [13, 0, 5, 14], [0.09662806703796267, 0.1621478194912538, 0.21548762580464817,", "y * 0.1739322518414499) / 2 grid[:,:,5] = (x * -0.5986715486203882", "14], [0.28064413535886806, 0.5181512474389621, 0.1504742947642479, 0.050730322437922]) grid = prod(grid, 1, [12,", "the grid grid = transit(grid, 4, [7, 6, 12, 8,", "9, 5], [0.24039798004748805, 0.2886075990223525, 0.18742374307846998, 0.11615833154358073, 0.16741234630810867]) grid = prod(grid,", "transit(grid, 7, [14, 2, 13, 1, 11, 3, 8, 7],", "-0.2952350240798842) grid = sin(grid, 11, 6, 1.576100090732909, -21.508000199215132) grid =", "sin(grid, 5, 10, -1.5052434957207308, 24.900059771988836) 
grid = sin(grid, 8, 10,", "13, 6], [0.3199750359220948, 0.07376266150860299, 0.03622483092076182, 0.09070212266434277, 0.4030414045204916, 0.07629394446370606]) grid =", "grid[:,:,10] = (x * -0.4262457935185371 + y * -0.6817079327248272) /", "np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx]) ** p return test_values(res) #set initial grid", "= magnitude(grid, 13, [2, 0], 2) grid = transit(grid, 13,", "[3]) grid = smooth_max(grid, 8, 11, 15) grid = sin(grid,", "return test_values(res.clip(-1,1)) def sin(x, t_indx, s_indx, scale = 1, shift", "10, 2, 15, 13], [0.3265190472987195, 0.21568397721657098, 0.06226802479442838, 0.0028158122366541832, 0.39271313845362693]) grid", "-6.909579361872105, 70.84834564082374) grid = transit(grid, 2, [11, 7, 13], [0.3629247592109436,", "smooth_min(grid, 7, 12, 0) grid = transit(grid, 2, [1, 2],", "9, 5, -5.606152225672729, -35.928477282758536) grid = transit(grid, 0, [7, 11,", "14], [0.29712982335534416, 0.2526657169525107, 0.08415696601637544, 0.18541009701166816, 0.011062110917544764, 0.017334502896306194, 0.1522407828502505]) grid =", "2) grid = transit(grid, 6, [15, 8], [0.5303803951305812, 0.4696196048694189]) grid", "14, [10, 14, 4, 9, 13, 6], [0.3199750359220948, 0.07376266150860299, 0.03622483092076182,", "= sin(grid, 5, 1, 2.0751861425380627, 63.37681521624819) grid = smooth_min(grid, 11,", "prod(grid, 11, [9]) grid = sin(grid, 4, 3, 0.10154488887533689, 12.479110491961137)", "= transit(grid, 13, [6, 2, 3, 15, 5, 7], [0.06492287400539203,", "6, 4, 9, 1, 0, 14], [0.36336761526831185, 0.17372789204937897, 0.08036453739500136, 0.09747098994785518,", "grid = transit(grid, 8, [9, 10, 2, 15, 13], [0.3265190472987195,", "-0.5063344373124843) / 2 grid[:,:,3] = (x * -0.8484277738516293 + y", "grid = prod(grid, 2, [3, 11, 1]) grid = smooth_min(grid,", "[9, 10, 2, 15, 13], [0.3265190472987195, 0.21568397721657098, 0.06226802479442838, 0.0028158122366541832, 0.39271313845362693])", "grid = sin(grid, 11, 6, 1.576100090732909, 
-21.508000199215132) grid = shift(grid,", "-4.9164570678736865, 86.15931416043557) grid = sin(grid, 1, 7, 1.6265187305620117, -97.13150019385894) grid", "2) grid = sin(grid, 1, 5, 8.18216846853571, -6.729427492311089) grid =", "[2, 11, 15, 4, 1, 0, 14], [0.29712982335534416, 0.2526657169525107, 0.08415696601637544,", "7], [0.23221079251346607, 0.3307147367708056, 0.26199556841553734, 0.018127231672754242, 0.13788777275073352, 0.01906389787670339]) grid = sin(grid,", "j in range(GRID_CHANNELS): x = j % 4 y =", "= transit(grid, 2, [13, 11, 5], [0.421270391024163, 0.5054038923567993, 0.07332571661903758]) grid", "6, [15], [1.0]) grid = sin(grid, 15, 0, -0.033265790773207085, 51.94880270063618)", "grid = transit(grid, 11, [7, 2, 3, 9, 5], [0.24039798004748805,", "15, 4, 2, 11, 13], [0.03597236183123865, 0.04938629068404894, 0.08457069101219464, 0.014801187461296406, 0.3649334871683411,", "-76.41676919069447) grid = smooth_min(grid, 11, 8, 12) grid = transit(grid,", "2, 8, 0], [0.28772794692354614, 0.1935939805514465, 0.06024872230823076, 0.13457223936247906, 0.32385711085429764]) grid =", "0.11445683149439734, 0.08475226158626031, 0.1416941580568898, 0.020968563089492034, 0.0847896752697893, 0.0921589665387646, 0.008240731277180186, 0.17158558178481512]) grid =", "5], 2) grid = magnitude(grid, 9, [15, 3, 11, 0,", "7]) grid = transit(grid, 8, [3], [1.0]) grid = inverse(grid,", "[0.22694849313985146, 0.5162695719847235, 0.25678193487542517]) grid = sin(grid, 9, 9, -4.261918262131112, 18.680580924548693)", "grid = sin(grid, 10, 0, 7.741409383532979, -12.082110529508299) grid = prod(grid,", "1.6405444007982959, -37.09230830685477) grid = transit(grid, 9, [8], [1.0]) grid =", "Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) + '.png') #save layers img = np.zeros((SIZE *", "0.3248025339288217, 0.4234363710484886, 0.13338109758306646, 0.050712192407629864]) grid = transit(grid, 7, [14, 2,", "0.20922781529873477, 0.16179927966914437, 0.30433226546908315]) grid 
= magnitude(grid, 6, [14, 5, 13,", "2, 9, 4, 8], [0.06904450551777742, 0.12680650314665426, 0.1756104206123629, 0.013987480750913602, 0.1337935702206657, 0.39097327478734406,", "= transit(grid, 15, [11, 4, 10], [0.6926745567135898, 0.1831142410590532, 0.12421120222735695]) grid", "np.exp(-x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return test_values(res) def", "magnitude(grid, 10, [11, 0, 5], 2) grid = magnitude(grid, 9,", "10, 15) grid = transit(grid, 1, [12, 8, 10, 4,", "grid = shift(grid, 3, 9, 3.0393348894939773) grid = shift(grid, 2,", "7, 2, 3.41043792019894, 65.36615977552518) grid = transit(grid, 0, [14, 3,", "[0.5303803951305812, 0.4696196048694189]) grid = inverse(grid, 0, 0) grid = magnitude(grid,", "60.95572898751007) grid = shift(grid, 14, 2, 2.55681173849493) grid = sin(grid,", "- 1 if shift[i] < 0: res[:,:,i] = np.abs((1 -", ": %.2f ]'%(np.amin(arr), np.amax(arr)) ) return arr #define grid transformation", "10, 15, 0, 5]) grid = transit(grid, 11, [7, 2,", "[MSC v.1900 64 bit (AMD64)] # For more information visit:", "transit(grid, 7, [15, 6, 2, 7], [0.45073658968521574, 0.16060948991238613, 0.12949271785123345, 0.2591612025511646])", "transit(grid, 12, [3, 13, 2, 9, 0], [0.24803411847529433, 0.2425397323068922, 0.0904752958055755,", "sin(grid, 13, 13, 7.718114740496995, 55.242200715207815) grid = sin(grid, 12, 10,", "= prod(grid, 11, [9]) grid = sin(grid, 4, 3, 0.10154488887533689,", "3.0393348894939773) grid = shift(grid, 2, 4, 2.1961962516242517) grid = prod(grid,", "grid[:,:,12] = (x * -0.5303146721156469 + y * -0.41048419195488317) /", "y = j // 4 img[x*SIZE:(x + 1)*SIZE, y*SIZE:(y+1)*SIZE] =", "- shift)) * 2 - 1 return test_values(res) def inverse(x,", "= transit(grid, 4, [3], [1.0]) grid = sin(grid, 3, 12,", "10, 14, 0.8649185298731181, 3.1973516320924773) grid = sin(grid, 9, 7, -2.4657577404884132,", "transit(grid, 13, [6, 2, 3, 15, 5, 7], [0.06492287400539203, 0.21223490901058306,", "10, [14], [1.0]) grid = transit(grid, 15, [11, 4, 10],", "[1.0]) 
grid = magnitude(grid, 14, [4], 2) grid = sin(grid,", "= inverse(grid, 0, 0) grid = magnitude(grid, 13, [8], 2)", "9, 4, 8], [0.06904450551777742, 0.12680650314665426, 0.1756104206123629, 0.013987480750913602, 0.1337935702206657, 0.39097327478734406, 0.08978424496428203])", "0.02917564277599849, 0.12489006625007311, 0.13398690135296518]) grid = transit(grid, 2, [2, 0, 11,", "0.046743011673522995, 0.05171405775355483]) grid = sin(grid, 10, 10, 0.9558311639914843, -47.618914508652054) grid", "[0.3265190472987195, 0.21568397721657098, 0.06226802479442838, 0.0028158122366541832, 0.39271313845362693]) grid = magnitude(grid, 11, [13,", "[9, 0, 11, 7, 3, 8], [0.03500911832175082, 0.03265868671024263, 0.3248025339288217, 0.4234363710484886,", "2, 13], [0.381505247910628, 0.12073241493361198, 0.3454992433435407, 0.15226309381221942]) grid = magnitude(grid, 10,", "= sin(grid, 14, 5, 0.053526366336325744, 4.147364704932215) grid = transit(grid, 4,", "sin(grid, 4, 3, 2.634465399239887, 62.07538440217337) grid = sin(grid, 7, 2,", "def power(x, t_indx, s_indx, p = 1): res = x.copy()", "2, [0, 4, 2], [0.010597803396528332, 0.7371576932264431, 0.25224450337702853]) grid = sin(grid,", "p = 1): res = x.copy() res[:,:,t_indx] = np.sign(x[:,:,s_indx]) *", "= magnitude(grid, 15, [5, 3, 8, 0, 15], 2) grid", "4, 10], [0.6926745567135898, 0.1831142410590532, 0.12421120222735695]) grid = magnitude(grid, 7, [6,", "15, -2.507870105026106, -89.43842740853354) grid = transit(grid, 0, [12, 6, 4,", "15, 10], [0.005204838856346087, 0.5116602651328436, 0.48313489601081044]) grid = transit(grid, 10, [10],", "6, [1, 7, 0, 2, 9, 4, 8], [0.06904450551777742, 0.12680650314665426,", "= transit(grid, 8, [14], [1.0]) grid = transit(grid, 4, [1,", "[7, 3], [0.9172074355564371, 0.08279256444356292]) grid = transit(grid, 13, [1, 2,", "-1, ord = ord) / np.sqrt(len(s_indx)) return test_values(res) def shift(x,", "p = 10): res = x.copy() res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx] *", "0], 
[0.24803411847529433, 0.2425397323068922, 0.0904752958055755, 0.11683555248582808, 0.30211530092641004]) grid = sin(grid, 5,", "[14, 5, 13, 11, 2, 9], 2) grid = sin(grid,", "+ y * 0.1739322518414499) / 2 grid[:,:,5] = (x *", "0.022574848472165728]) grid = transit(grid, 4, [11, 4, 15, 10, 8,", "7) grid = prod(grid, 10, [5, 2]) grid = transit(grid,", "1) / 2) ** (1 + shift[i]) - 1) **", "t_indx, s_indx, p = 1): res = x.copy() res[:,:,t_indx] =", "0.12833432959710458, 0.1314519110369097]) grid = transit(grid, 8, [6, 2], [0.6857167761482571, 0.31428322385174284])", "sin(grid, 2, 15, -2.5319898824657017, -45.01904701883333) grid = shift(grid, 5, 5,", "Synthesizer\" # Generation date: 2021-11-28 09:21:40 UTC # GAS change", "grid = transit(grid, 4, [7, 6, 12, 8, 9, 0,", "12, 9, 0, 8, 15, 2, 10, 14], [0.20381942291270427, 0.07753380798970702,", "grid = sin(grid, 14, 7, 5.409920766787869, -58.09956716630187) grid = sin(grid,", "4, 12]) grid = transit(grid, 7, [15, 6, 2, 7],", "grid = magnitude(grid, 0, [5, 0], 2) grid = transit(grid,", "[14], [1.0]) grid = transit(grid, 8, [9, 10, 2, 15,", "0.12372657123165616, 0.1356897031789931, 0.20047556686480725, 0.09921434949484752, 0.05399039482501285]) grid = transit(grid, 9, [5],", "0.008781149737259792, 0.24627334258742545, 0.04870190081124998, 0.049950480577274, 0.15123046752435387, 0.31255198044446264, 0.04415702829077187]) grid = transit(grid,", "-45.836492724169695) grid = sin(grid, 1, 2, -1.5301674594368837, -60.29431568717391) grid =", "13], [0.03597236183123865, 0.04938629068404894, 0.08457069101219464, 0.014801187461296406, 0.3649334871683411, 0.28062233683539095, 0.08637063851194285, 0.06076815802338077, 0.022574848472165728])", "np.pi * scale + shift) return test_values(res) def magnitude(x, t_indx,", "2 grid[:,:,9] = (x * -0.4075423366723827 + y * 0.5388833863473126)", "= transit(grid, 15, [7, 3], [0.9172074355564371, 0.08279256444356292]) grid = transit(grid,", "#save layers img = np.zeros((SIZE * 4, SIZE * 4))", 
"[0.5076634403621766, 0.003404332378773421, 0.04142944289977586, 0.4475027843592742]) grid = inverse(grid, 4, 5) grid", "grid = transit(grid, 3, [9], [1.0]) grid = transit(grid, 11,", "0.04732779189481446, 0.13963294227934445]) grid = smooth_min(grid, 0, 13, 15) grid =", "[8, 4, 15, 9, 10], [0.10267794314653868, 0.019022820046952493, 0.061606568183823145, 0.4832751235896067, 0.33341754503307897])", "7.718114740496995, 55.242200715207815) grid = sin(grid, 12, 10, -3.1151555334821888, 17.571856948335267) grid", "4, 4, 3.47544933993972, -37.11795195118333) grid = sin(grid, 11, 7, -0.3409112713023047,", "shift(grid, 11, 5, 1.0526879494498724) grid = transit(grid, 1, [14], [1.0])", "2, 13, 1, 11, 3, 8, 7], [0.207462236904601, 0.11516125867317799, 0.12240760599022518,", "0, 14], 2) grid = sin(grid, 4, 5, -1.8457292172108153, -53.43885199947502)", "axis = -1, ord = ord) / np.sqrt(len(s_indx)) return test_values(res)", "grid = transit(grid, 0, [7, 1, 11, 0, 15], [0.036901331671075975,", "- shift[i])) * 2 - 1 return test_values(res) res =", "7], [0.18247956114317448, 0.8175204388568255]) grid = transit(grid, 8, [11, 15, 0],", "grid = transit(grid, 6, [1, 7, 0, 2, 9, 4,", "-12.082110529508299) grid = prod(grid, 11, [9]) grid = sin(grid, 4,", "sin(grid, 10, 2, -0.010214061334835559, 20.43114218394348) grid = transit(grid, 8, [1],", "4, 8, 5, 6, 7]) grid = transit(grid, 8, [3],", "date: 2021-11-28 09:21:40 UTC # GAS change date: 2021-11-28 09:20:21", "= sin(grid, 10, 10, 0.7827958631857042, -90.82177259964699) grid = transit(grid, 6,", "1) ** (1 / (1 - shift)) * 2 -", "/ (1 - shift[i])) * 2 - 1 return test_values(res)", "= (-np.abs(((x[:,:,i] + 1) / 2) ** (1 + shift[i])", "[1.0]) grid = shift(grid, 9, 13, -5.367438086043798) grid = magnitude(grid,", "grid = transit(grid, 9, [5], [1.0]) grid = transit(grid, 15,", "def smooth_max(x, t_indx, s1_indx, s2_indx, p = 10): res =", "14, 0, 13], [0.2785496566747933, 0.004915230889640017, 0.30146401859790545, 0.4150710938376613]) grid = 
sin(grid,", "grid = transit(grid, 7, [4, 10, 1, 13, 5, 0,", "9, 4, 6, 12], [0.18067242214638962, 0.12939497982917472, 0.08164480089591167, 0.24583958083442445, 0.2244518823086713, 0.13799633398542827])", "transit(grid, 7, [4, 10, 1, 13, 5, 0, 7, 8,", "1, 0], 2) grid = transit(grid, 13, [11, 0], [0.6569516962992897,", "11, [8, 2], 2) grid = transit(grid, 7, [12, 11,", "sin(grid, 4, 2, -3.329894296119046, -76.41676919069447) grid = smooth_min(grid, 11, 8,", "Art Synthesizer\" # Generation date: 2021-11-28 09:21:40 UTC # GAS", "-0.45147169454413794) / 2 grid[:,:,1] = (x * 0.8090860808441245 + y", "0.016462544099609754, 0.0072484377164178625, 0.4477791048998878, 0.11849249751317383]) grid = transit(grid, 10, [5, 11,", "3, 6], [0.1020239434902293, 0.05405846145210329, 0.11525379082942891, 0.11556721863292163, 0.12372657123165616, 0.1356897031789931, 0.20047556686480725, 0.09921434949484752,", "8], [0.06904450551777742, 0.12680650314665426, 0.1756104206123629, 0.013987480750913602, 0.1337935702206657, 0.39097327478734406, 0.08978424496428203]) grid =", "7]) grid = sin(grid, 0, 3, -3.561651028660104, 11.539889679902203) grid =", "0.3200094081197146, 0.06439062651950353, 0.03284446726347166, 0.04732779189481446, 0.13963294227934445]) grid = smooth_min(grid, 0, 13,", "p = 10): res = x.copy() res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx] *", "0.15936221296996333, 0.31981537589964426]) grid = sin(grid, 10, 3, -2.5681840787633137, -30.256455817944243) grid", "]'%(np.amin(arr), np.amax(arr)) ) return arr #define grid transformation methods def", "grid = magnitude(grid, 9, [15, 3, 11, 0, 14], 2)", "grid = sin(grid, 10, 0, 0.5112825397666086, 37.95950546335726) grid = sin(grid,", "grid = sin(grid, 1, 5, 0.6814927249849106, 30.75954926767548) grid = inverse(grid,", "15, 0], [0.08195235243098883, 0.6796005904358621, 0.23844705713314918]) grid = power(grid, 14, 0,", "magnitude(grid, 11, [13, 10, 12, 2, 11, 14], 2) grid", "* -0.4075423366723827 + y * 0.5388833863473126) / 2 
grid[:,:,10] =", "transit(grid, 13, [12, 15, 9, 2, 0, 1, 5], [0.18796556626817826,", "0.18742374307846998, 0.11615833154358073, 0.16741234630810867]) grid = prod(grid, 0, [0, 1, 2,", "5, 13, 11, 2, 9], 2) grid = sin(grid, 9,", "/ np.sqrt(len(s_indx)) return test_values(res) def shift(x, t_indx, s_indx, shift): res", "-3.329894296119046, -76.41676919069447) grid = smooth_min(grid, 11, 8, 12) grid =", "os #OS version: default import numpy as np #Numpy version:", "1.576100090732909, -21.508000199215132) grid = shift(grid, 11, 5, 1.0526879494498724) grid =", "11.185401112275173) grid = sin(grid, 10, 4, 1.2844464834351186, -45.836492724169695) grid =", "1, [4, 14, 0, 13], [0.2785496566747933, 0.004915230889640017, 0.30146401859790545, 0.4150710938376613]) grid", "12, 6, 11, 14], [0.10006330804326793, 0.03891760159161208, 0.005474465860804227, 0.12962618248625338, 0.03090992138168193, 0.016043163973997736,", "= transit(grid, 7, [13], [1.0]) grid = sin(grid, 3, 12,", "9, 10, -1.8565532127479274, -54.75186223635349) grid = transit(grid, 10, [14], [1.0])", "1, 1, 11) grid = transit(grid, 5, [11, 4, 2,", "0.9804797761207309 + y * -0.5063344373124843) / 2 grid[:,:,3] = (x", "grid = power(grid, 3, 5, 0.10200689258338674) grid = transit(grid, 2,", "14], 2) grid = sin(grid, 4, 5, -1.8457292172108153, -53.43885199947502) grid", "= (x * -0.3391983246964396 + y * -0.5135707069423852) / 2", "0.06343641920215143, 0.038951322931441136, 0.04613309733662021, 0.19750663742298355, 0.16072124228620793, 0.15869932715876592, 0.14757838472737334]) grid = transit(grid,", "grid = transit(grid, 4, [3, 13, 9, 8, 5, 2,", "1.19.5 from PIL import Image #PIL version: 8.1.2 #set initial", "0.15192425697494277, 0.4951725812299663]) grid = sin(grid, 4, 8, 3.386521226555936, 60.95572898751007) grid", "grid = sin(grid, 9, 4, 3.0281102269529683, 11.185401112275173) grid = sin(grid,", "= transit(grid, 8, [6, 2], [0.6857167761482571, 0.31428322385174284]) grid = shift(grid,", "transit(grid, 2, [9, 11, 10], 
[0.2662646690994658, 0.2460545507972383, 0.4876807801032959]) grid =", "transit(grid, 15, [12, 0, 1, 11], [0.01847979792505241, 0.33442336387003857, 0.15192425697494277, 0.4951725812299663])", "grid = sin(grid, 10, 2, -0.010214061334835559, 20.43114218394348) grid = transit(grid,", "0.12489006625007311, 0.13398690135296518]) grid = transit(grid, 2, [2, 0, 11, 10,", "grid = transit(grid, 4, [14, 11, 12, 13, 4, 7],", "8, [3], [1.0]) grid = inverse(grid, 8, 5) grid =", "= transit(grid, 5, [11, 4, 2, 1, 13, 12, 0,", "= transit(grid, 8, [2, 11, 15, 4, 1, 0, 14],", "0.24627334258742545, 0.04870190081124998, 0.049950480577274, 0.15123046752435387, 0.31255198044446264, 0.04415702829077187]) grid = transit(grid, 1,", "= sin(grid, 2, 5, -5.225820110717917, 57.71107021356826) grid = transit(grid, 2,", "grid = transit(grid, 10, [11, 4, 2, 8, 14], [0.3705316303566195,", "[12, 15, 9, 2, 0, 1, 5], [0.18796556626817826, 0.19260744772691155, 0.11226112831146452,", "+ np.exp(x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return test_values(res)", "prod(grid, 2, [8, 7, 11, 10, 15, 0, 5]) grid", "0.05113255120007798) / 2 grid[:,:,8] = (x * -0.3391983246964396 + y", "= np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx]) ** p return test_values(res) #set initial", "0.08377067725345017, 0.13045782410775286, 0.02917564277599849, 0.12489006625007311, 0.13398690135296518]) grid = transit(grid, 2, [2,", "15, 14, 9], [0.33493798319460544, 0.14040206011900094, 0.3010385316537353, 0.07412413198773361, 0.14949729304492473]) grid =", "grid = sin(grid, 1, 2, -1.5301674594368837, -60.29431568717391) grid = transit(grid,", "[1, 7, 0, 2, 9, 4, 8], [0.06904450551777742, 0.12680650314665426, 0.1756104206123629,", "= transit(grid, 6, [1, 7, 0, 2, 9, 4, 8],", "0.2914526739617249) / 2 grid[:,:,2] = (x * 0.9804797761207309 + y", "[1.0]) grid = inverse(grid, 8, 5) grid = smooth_max(grid, 10,", "/ 2 grid[:,:,15] = (x * 0.49037959172682255 + y *", "= transit(grid, 6, [15, 8], [0.5303803951305812, 0.4696196048694189]) grid 
= inverse(grid,", "0.1416941580568898, 0.020968563089492034, 0.0847896752697893, 0.0921589665387646, 0.008240731277180186, 0.17158558178481512]) grid = transit(grid, 5,", "90.63950889076133) grid = sin(grid, 14, 14, -1.842523240371888, 74.23947694195837) grid =", "(1 - shift) - 1) ** (1 / (1 -", "0.9223892145169746) grid = transit(grid, 2, [9, 11, 10], [0.2662646690994658, 0.2460545507972383,", "-0.033265790773207085, 51.94880270063618) grid = smooth_min(grid, 13, 10, 15) grid =", "/ 2 grid[:,:,14] = (x * -0.7665883618456049 + y *", "= transit(grid, 13, [5, 15, 10], [0.13237609957996088, 0.22944646977966682, 0.6381774306403722]) grid", "15, [13, 3], [0.5897775709748927, 0.41022242902510725]) grid = sin(grid, 12, 14,", "0], 2) grid = transit(grid, 13, [11, 0], [0.6569516962992897, 0.3430483037007103])", "grid = inverse(grid, 1, 0) grid = smooth_max(grid, 1, 15,", "transit(grid, 14, [10, 14, 4, 9, 13, 6], [0.3199750359220948, 0.07376266150860299,", "to the grid grid = transit(grid, 4, [7, 6, 12,", "sin(grid, 7, 3, 1.6405444007982959, -37.09230830685477) grid = transit(grid, 9, [8],", "results im = Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) + '.png') #save layers img", "* 2 - 1 return test_values(res) res = np.zeros((SIZE, SIZE,", "12, 0, 8], [0.08486049729383285, 0.15069099224942706, 0.024923245737924458, 0.07191051851248272, 0.25942601829807205, 0.16834508849259286, 0.14540219911263502,", "= np.zeros((SIZE * 4, SIZE * 4)) for j in", "0, [7, 1, 11, 0, 15], [0.036901331671075975, 0.5054281720479712, 0.13288430351514774, 0.10820806749406277,", "[14, 13, 15], [0.530662002197574, 0.1082014600047566, 0.36113653779766947]) grid = transit(grid, 14,", "(x * -0.3391983246964396 + y * -0.5135707069423852) / 2 grid[:,:,9]", "0: res[:,:,i] = (-np.abs(((x[:,:,i] + 1) / 2) ** (1", "* -0.5864100240508576 + y * -0.9425245660964123) / 2 grid[:,:,14] =", "= prod(grid, 3, [14, 15]) grid = inverse(grid, 5, 5)", "14, 8, -0.4693746108213766, -98.17810769380118) grid 
= sin(grid, 12, 10, 3.6427863324838423,", "transit(grid, 6, [6, 14], [0.7201753385758813, 0.2798246614241187]) grid = prod(grid, 4,", "6, 1, -1.115193397983063) grid = smooth_max(grid, 13, 3, 8) grid", "10): res = x.copy() res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx] * p) +", "[0.10006330804326793, 0.03891760159161208, 0.005474465860804227, 0.12962618248625338, 0.03090992138168193, 0.016043163973997736, 0.13259375374543056, 0.09920705802758992, 0.1415090600653345, 0.09597789664069131,", "10, 7], [0.22694849313985146, 0.5162695719847235, 0.25678193487542517]) grid = sin(grid, 9, 9,", "8, 13, 2], [0.32464063956303774, 0.20922781529873477, 0.16179927966914437, 0.30433226546908315]) grid = magnitude(grid,", "0.3307147367708056, 0.26199556841553734, 0.018127231672754242, 0.13788777275073352, 0.01906389787670339]) grid = sin(grid, 4, 7,", "[14], [1.0]) grid = transit(grid, 15, [11, 4, 10], [0.6926745567135898,", "8, [13, 9, 5, 7, 14], [0.05801706264076675, 0.341923243761946, 0.0494872820880747, 0.29583940098242745,", "0.12240760599022518, 0.05066197369764289, 0.13869178538077429, 0.09948828746526778, 0.16686217850764798, 0.09926467338066268]) grid = transit(grid, 6,", "10, 0.7827958631857042, -90.82177259964699) grid = transit(grid, 6, [8, 6, 5,", "14], [0.3705316303566195, 0.1755951969700656, 0.043989590834687294, 0.22866693087969006, 0.1812166509589377]) grid = sin(grid, 4,", "= (img + 1) * 127.5 im = Image.fromarray(np.uint8(img)) im.save(os.path.basename(__file__)", "def shift_colors(x, shift): res = x.copy() for i in range(x.shape[-1]):", "8, 0, 15], 2) grid = prod(grid, 2, [3, 11,", "= shift(grid, 9, 13, -5.367438086043798) grid = magnitude(grid, 13, [2,", "[11, 4, 10], [0.6926745567135898, 0.1831142410590532, 0.12421120222735695]) grid = magnitude(grid, 7,", "0.053526366336325744, 4.147364704932215) grid = transit(grid, 4, [3], [1.0]) grid =", "2) grid = transit(grid, 8, [5, 4, 15, 6, 14,", "0.16060948991238613, 0.12949271785123345, 0.2591612025511646]) grid 
= transit(grid, 10, [11, 4, 2,", "smooth_min(grid, 0, 5, 1) grid = magnitude(grid, 0, [5, 0],", "0.2671958928603256]) grid = smooth_min(grid, 1, 1, 11) grid = transit(grid,", "10, 10, 0.9558311639914843, -47.618914508652054) grid = shift(grid, 9, 8, -1.1449289879251126)", "12], [0.05731677054419865, 0.08527765171582982, 0.33929504571762287, 0.1932983536368378, 0.0036374435750729187, 0.12289545051895708, 0.19827928429148084]) grid =", "2], [0.6857167761482571, 0.31428322385174284]) grid = shift(grid, 6, 15, 4.115946851379848) grid", "15, [11, 4, 10], [0.6926745567135898, 0.1831142410590532, 0.12421120222735695]) grid = magnitude(grid,", "2, 3.501615294498545, -75.50049353340206) grid = prod(grid, 9, [1, 4, 0,", "transit(grid, 9, [8], [1.0]) grid = sin(grid, 5, 10, -1.5052434957207308,", "[0.1597221050818672, 0.523275926379751, 0.31700196853838186]) grid = sin(grid, 14, 7, 5.409920766787869, -58.09956716630187)", "grid = transit(grid, 10, [9, 8], [0.7777441717493406, 0.22225582825065934]) grid =", "5) grid = transit(grid, 4, [8, 4, 15, 9, 10],", "grid = sin(grid, 10, 9, 6.219381309190064, -71.03631884776823) grid = sin(grid,", "sin(grid, 7, 7, 0.5492744322205282, 35.873568370773654) grid = transit(grid, 7, [13],", "8, 10, 4, 2], [0.43102537693091664, 0.25433300797798253, 0.21618454566402304, 0.046743011673522995, 0.05171405775355483]) grid", "grid = smooth_min(grid, 11, 8, 12) grid = transit(grid, 1,", "0, 7, 8, 9, 12, 6, 11, 14], [0.10006330804326793, 0.03891760159161208,", "[0.2070905138265326, 0.06562120796792839, 0.17355051228662716, 0.05514926535269553, 0.0829726599151083, 0.41561584065110807]) grid = transit(grid, 2,", "5, 3.1584260780059252) grid = transit(grid, 10, [9, 8], [0.7777441717493406, 0.22225582825065934])", "(1 / (1 + shift[i])) + 1) * 2 -", "0.14082681623065177, 0.19859698568682838, 0.4891861295353413]) grid = transit(grid, 13, [12, 15, 9,", "np.sqrt(len(s_indx)) return test_values(res) def shift(x, t_indx, s_indx, shift): res =", "= shift(grid, 3, 
9, 3.0393348894939773) grid = shift(grid, 2, 4,", "[1.0]) grid = transit(grid, 5, [9, 13, 3, 14], [0.28064413535886806,", "8, 15, 0, 12, 3]) grid = transit(grid, 13, [5,", "5, [1, 9, 3, 10, 4], [0.24075568684771534, 0.02527375632067568, 0.4828116495090197, 0.09546712897709621,", "1, 12, 9, 0, 8, 15, 2, 10, 14], [0.20381942291270427,", "0.094441440303033]) grid = transit(grid, 11, [12], [1.0]) grid = power(grid,", "shift[i]) - 1) ** (1 / (1 + shift[i])) +", "version: default import numpy as np #Numpy version: 1.19.5 from", "3, 12, 9], [0.13643904772292245, 0.38438336340747, 0.15936221296996333, 0.31981537589964426]) grid = sin(grid,", "0, 0) grid = magnitude(grid, 13, [8], 2) grid =", "= magnitude(grid, 9, [12, 14, 4], 2) grid = shift(grid,", "// 4 img[x*SIZE:(x + 1)*SIZE, y*SIZE:(y+1)*SIZE] = grid[:,:,j] img =", "x.copy() if shift > 0: res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx] + 1)", "0.9386329219527516 + y * -0.45147169454413794) / 2 grid[:,:,1] = (x", "0.2132573540073865, 0.11957266769813353, 0.3209038404419199]) grid = transit(grid, 6, [1, 7, 0,", "-99.85812912291547) grid = transit(grid, 0, [4, 3, 8], [0.23275058190778222, 0.49901982570530873,", "= transit(grid, 5, [11, 10], [0.9817011300708863, 0.018298869929113594]) grid = sin(grid,", "0.4696196048694189]) grid = inverse(grid, 0, 0) grid = magnitude(grid, 13,", "[:,:,i]+ 1) / 2) ** (1 - shift[i]) - 1)", "= 0): res = x.copy() res[:,:,t_indx] = np.sin(x[:,:,s_indx] * 0.5", "(1 - shift)) * 2 - 1 return test_values(res) def", "res[:,:,i] = np.abs((1 - (x [:,:,i]+ 1) / 2) **", "grid = transit(grid, 2, [10, 11, 4, 15, 0, 6],", "10, 2.5947698108630664, -90.74050288622541) grid = sin(grid, 9, 8, -0.8743741598911887, 15.92872484723533)", "[6, 3, 7]) grid = sin(grid, 0, 3, -3.561651028660104, 11.539889679902203)", "15, 9, 6, 11], [0.036102265915692405, 0.1224495166624379, 0.2384660328868578, 0.3357862916746864, 0.2671958928603256]) grid", "transit(grid, 3, [7, 3, 12, 9], [0.13643904772292245, 0.38438336340747, 
0.15936221296996333, 0.31981537589964426])", "3, 15, 5, 7], [0.06492287400539203, 0.21223490901058306, 0.36311130408652753, 0.09994467226348329, 0.12833432959710458, 0.1314519110369097])", "magnitude(grid, 14, [4], 2) grid = sin(grid, 1, 5, 8.18216846853571,", "0.2425397323068922, 0.0904752958055755, 0.11683555248582808, 0.30211530092641004]) grid = sin(grid, 5, 2, -2.2972705471452146,", "grid = sin(grid, 10, 2, 0.9155140652310594, -34.1653400637653) grid = transit(grid,", "[3, 9, 2]) grid = sin(grid, 5, 1, 2.0751861425380627, 63.37681521624819)", "* 2).reshape((1, SIZE)).repeat(SIZE, 0) y = ((np.arange(SIZE)/(SIZE-1) - 0.5) *", "0.02841485585755143, 0.19916101840344472, 0.03422984110049058, 0.03597196960697647]) grid = magnitude(grid, 13, [11, 7],", "9, 13, 6], [0.3199750359220948, 0.07376266150860299, 0.03622483092076182, 0.09070212266434277, 0.4030414045204916, 0.07629394446370606]) grid", "15, 11, 9, 12], [0.21908823570589997, 0.1636179110868493, 0.03797238284324163, 0.29532957711092916, 0.2839918932530799]) grid", "= sin(grid, 5, 2, -2.2972705471452146, -12.522748365129786) grid = smooth_min(grid, 12,", "(x * 0.9804797761207309 + y * -0.5063344373124843) / 2 grid[:,:,3]", "#Numpy version: 1.19.5 from PIL import Image #PIL version: 8.1.2", "grid = transit(grid, 2, [0, 4, 2], [0.010597803396528332, 0.7371576932264431, 0.25224450337702853])", "2) grid = transit(grid, 4, [4, 12, 14, 15, 7,", "0.008240731277180186, 0.17158558178481512]) grid = transit(grid, 5, [11, 10], [0.9817011300708863, 0.018298869929113594])", "[10], [1.0]) grid = transit(grid, 1, [8, 10, 15, 14,", "grid = sin(grid, 7, 3, 1.6405444007982959, -37.09230830685477) grid = transit(grid,", "(x * -0.5303146721156469 + y * -0.41048419195488317) / 2 grid[:,:,13]", "grid = sin(grid, 11, 13, -6.909579361872105, 70.84834564082374) grid = transit(grid,", "grid = prod(grid, 3, [14, 15]) grid = inverse(grid, 5,", "grid[:,:,14] = (x * -0.7665883618456049 + y * -0.3867357840809138) /", "= shift(grid, 9, 8, 
-1.1449289879251126) grid = transit(grid, 7, [4,", "4, 8, 4.28026157040775, -75.14180284322572) grid = prod(grid, 3, [14, 15])", "-0.3391983246964396 + y * -0.5135707069423852) / 2 grid[:,:,9] = (x", "grid = magnitude(grid, 15, [5, 3, 8, 0, 15], 2)", "0, 7.741409383532979, -12.082110529508299) grid = prod(grid, 11, [9]) grid =", "13, 7.718114740496995, 55.242200715207815) grid = sin(grid, 12, 10, -3.1151555334821888, 17.571856948335267)", "2) grid = transit(grid, 13, [6, 2, 3, 15, 5,", "9, 5, 7, 14], [0.05801706264076675, 0.341923243761946, 0.0494872820880747, 0.29583940098242745, 0.2547330105267852]) grid", "1.097917736937588, 58.87772371184383) grid = transit(grid, 11, [9, 11], [0.37033495928182997, 0.6296650407181701])", "grid = smooth_max(grid, 1, 15, 12) grid = prod(grid, 11,", "0.39097327478734406, 0.08978424496428203]) grid = smooth_min(grid, 9, 9, 10) grid =", "(x * 0.49037959172682255 + y * -0.7671554143072785) / 2 #apply", "4, 15, 10, 8, 5, 2, 3], [0.23701292672659616, 0.08316792464084911, 0.017867439461611043,", "grid = sin(grid, 1, 1, -0.183401440709518, -88.40242580975152) grid = transit(grid,", "7], [0.207462236904601, 0.11516125867317799, 0.12240760599022518, 0.05066197369764289, 0.13869178538077429, 0.09948828746526778, 0.16686217850764798, 0.09926467338066268]) grid", "np.zeros((SIZE * 4, SIZE * 4)) for j in range(GRID_CHANNELS):", "-0.5155435342135386) / 2 grid[:,:,4] = (x * -0.6644350461377522 + y", "= sin(grid, 12, 10, 3.6427863324838423, 99.297524709649) grid = sin(grid, 5,", "grid = sin(grid, 13, 13, 7.718114740496995, 55.242200715207815) grid = sin(grid,", "13, 3, -0.15800274281797377, 90.63950889076133) grid = sin(grid, 14, 14, -1.842523240371888,", "7, [4, 10, 1, 13, 5, 0, 7, 8, 9,", "9, 0], [0.24803411847529433, 0.2425397323068922, 0.0904752958055755, 0.11683555248582808, 0.30211530092641004]) grid = sin(grid,", "11, 10, 5, 4, 15, 13], [0.1869735689344564, 0.06343641920215143, 0.038951322931441136, 0.04613309733662021,", "= sin(grid, 14, 
7, 5.409920766787869, -58.09956716630187) grid = sin(grid, 2,", "13, 2, 9, 0], [0.24803411847529433, 0.2425397323068922, 0.0904752958055755, 0.11683555248582808, 0.30211530092641004]) grid", "2) ** (1 - shift[i]) - 1) ** (1 /", "[1.0]) grid = sin(grid, 3, 12, 6.470760426148978, -53.62090724330151) grid =", "0.410362436413437]) grid = inverse(grid, 6, 6) grid = sin(grid, 7,", "2, 13, 12, 3, 6], [0.1020239434902293, 0.05405846145210329, 0.11525379082942891, 0.11556721863292163, 0.12372657123165616,", "transit(grid, 5, [11, 10], [0.9817011300708863, 0.018298869929113594]) grid = sin(grid, 14,", "#save results im = Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) + '.png') #save layers", "13, 15) grid = smooth_max(grid, 5, 8, 4) grid =", "2) grid = sin(grid, 4, 8, 4.28026157040775, -75.14180284322572) grid =", "y * -0.9425245660964123) / 2 grid[:,:,14] = (x * -0.7665883618456049", "5, 0.6814927249849106, 30.75954926767548) grid = inverse(grid, 8, 7) grid =", "power(grid, 3, 5, 0.10200689258338674) grid = transit(grid, 2, [10, 11,", "5], [0.5076634403621766, 0.003404332378773421, 0.04142944289977586, 0.4475027843592742]) grid = inverse(grid, 4, 5)", "sin(grid, 4, 4, 3.47544933993972, -37.11795195118333) grid = sin(grid, 11, 7,", "[7, 3, 12, 9], [0.13643904772292245, 0.38438336340747, 0.15936221296996333, 0.31981537589964426]) grid =", "= smooth_max(grid, 10, 15, 10) grid = transit(grid, 11, [9,", "3)) res += shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669, 1.4677093499726706, 1.2451388311186942]) res =", "grid = shift(grid, 9, 13, -5.367438086043798) grid = magnitude(grid, 13,", "2 grid[:,:,14] = (x * -0.7665883618456049 + y * -0.3867357840809138)", "= (x * 0.8435706697714382 + y * 0.7746597063144072) / 2", "0.9558311639914843, -47.618914508652054) grid = shift(grid, 9, 8, -1.1449289879251126) grid =", "2, 3.41043792019894, 65.36615977552518) grid = transit(grid, 0, [14, 3, 11,", "1, 5, 8.18216846853571, -6.729427492311089) grid = 
magnitude(grid, 11, [8, 2],", "0.09747098994785518, 0.040818441056887325, 0.16796111771248814, 0.07628940657007711]) grid = transit(grid, 3, [11, 1,", "11, 7, -0.3409112713023047, 75.93313567333723) grid = transit(grid, 11, [5, 10,", "7, 0.5492744322205282, 35.873568370773654) grid = transit(grid, 7, [13], [1.0]) grid", "1.2451388311186942]) res = res / 1 res = ((res +", "GRID_CHANNELS)) x = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((1, SIZE)).repeat(SIZE, 0)", "0.4365452266748293) / 2 grid[:,:,7] = (x * 0.5049774961793401 + y", "= inverse(grid, 8, 5) grid = smooth_max(grid, 10, 5, 13)", "sin(grid, 3, 11, -6.496603906160505, -73.75617586359363) grid = transit(grid, 6, [6,", "0.2460545507972383, 0.4876807801032959]) grid = transit(grid, 2, [7], [1.0]) grid =", "1, [12, 13]) grid = sin(grid, 6, 14, -1.927951619591129, -65.3028706482776)", "5, 5, 3.1584260780059252) grid = transit(grid, 10, [9, 8], [0.7777441717493406,", "grid = shift(grid, 8, 1, -0.2952350240798842) grid = sin(grid, 11,", "= transit(grid, 6, [8, 6, 5, 7, 4, 2], [0.39579476392315127,", "grid = sin(grid, 1, 11, 0.5071121900678415, 10.950101187785563) grid = shift(grid,", "sin(grid, 5, 14, -1.45141083652418, -99.85812912291547) grid = transit(grid, 0, [4,", "[0.32464063956303774, 0.20922781529873477, 0.16179927966914437, 0.30433226546908315]) grid = magnitude(grid, 6, [14, 5,", "= magnitude(grid, 9, [15, 3, 11, 0, 14], 2) grid", "8], [0.23275058190778222, 0.49901982570530873, 0.2682295923869092]) grid = magnitude(grid, 8, [10, 9,", "smooth_min(grid, 9, 9, 10) grid = shift(grid, 8, 1, -0.2952350240798842)", "[6, 13, 7], [0.16813621041531998, 0.42150135317124293, 0.410362436413437]) grid = inverse(grid, 6,", "2) grid = magnitude(grid, 9, [15, 3, 11, 0, 14],", "3], [0.30088974760959275, 0.6991102523904072]) grid = transit(grid, 8, [2, 11, 15,", "0.09920705802758992, 0.1415090600653345, 0.09597789664069131, 0.06106766497801195, 0.14032187015082653, 0.008288053054498123]) grid = prod(grid, 15,", 
"sin(grid, 9, 15, -2.507870105026106, -89.43842740853354) grid = transit(grid, 0, [12,", "4, 2], [0.39579476392315127, 0.3200094081197146, 0.06439062651950353, 0.03284446726347166, 0.04732779189481446, 0.13963294227934445]) grid =", "0: res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx] + 1) / 2) ** (1", "0.1935939805514465, 0.06024872230823076, 0.13457223936247906, 0.32385711085429764]) grid = transit(grid, 1, [7, 2,", "grid transformation methods def transit(x, t_indx, s_indx, alphas): res =", "sin(grid, 13, 3, -0.15800274281797377, 90.63950889076133) grid = sin(grid, 14, 14,", "[12, 0, 1, 11], [0.01847979792505241, 0.33442336387003857, 0.15192425697494277, 0.4951725812299663]) grid =", "0, 11, 10, 5, 4, 15, 13], [0.1869735689344564, 0.06343641920215143, 0.038951322931441136,", "0, 5, 1) grid = magnitude(grid, 0, [5, 0], 2)", "13, 2], [0.32464063956303774, 0.20922781529873477, 0.16179927966914437, 0.30433226546908315]) grid = magnitude(grid, 6,", "35.873568370773654) grid = transit(grid, 7, [13], [1.0]) grid = sin(grid,", "3], [0.5897775709748927, 0.41022242902510725]) grid = sin(grid, 12, 14, 1.097917736937588, 58.87772371184383)", "transit(grid, 13, [13, 0, 5, 14], [0.09662806703796267, 0.1621478194912538, 0.21548762580464817, 0.5257364876661353])", "** (1 / (1 + shift)) + 1) * 2", "grid = transit(grid, 2, [13, 11, 5], [0.421270391024163, 0.5054038923567993, 0.07332571661903758])", "[0.28772794692354614, 0.1935939805514465, 0.06024872230823076, 0.13457223936247906, 0.32385711085429764]) grid = transit(grid, 1, [7,", "np.amin(arr) < -1 or np.amax(arr) > 1: raise Exception('Values went", "[1.0]) grid = transit(grid, 15, [11, 4, 10], [0.6926745567135898, 0.1831142410590532,", "10], [0.13237609957996088, 0.22944646977966682, 0.6381774306403722]) grid = transit(grid, 6, [15], [1.0])", "0], [0.6569516962992897, 0.3430483037007103]) grid = sin(grid, 14, 5, 0.053526366336325744, 4.147364704932215)", "in range(x.shape[-1]): if shift[i] > 0: res[:,:,i] = (-np.abs(((x[:,:,i] +", 
"7.741409383532979, -12.082110529508299) grid = prod(grid, 11, [9]) grid = sin(grid,", "3.1973516320924773) grid = sin(grid, 9, 7, -2.4657577404884132, 72.95418196004374) grid =", "s_indx, alphas): res = x.copy() res[:,:,t_indx] = np.sum(x[:,:,s_indx] * alphas,", "15) grid = sin(grid, 12, 6, -3.621533174445339, 24.02414911462421) grid =", "0.024092687676923453, 0.02665655056773558, 0.17667886361751853, 0.15211061797378253, 0.016462544099609754, 0.0072484377164178625, 0.4477791048998878, 0.11849249751317383]) grid =", "[0.5897775709748927, 0.41022242902510725]) grid = sin(grid, 12, 14, 1.097917736937588, 58.87772371184383) grid", "2], [0.39579476392315127, 0.3200094081197146, 0.06439062651950353, 0.03284446726347166, 0.04732779189481446, 0.13963294227934445]) grid = smooth_min(grid,", "sin(grid, 8, 10, 2.5947698108630664, -90.74050288622541) grid = sin(grid, 9, 8,", "[0.45073658968521574, 0.16060948991238613, 0.12949271785123345, 0.2591612025511646]) grid = transit(grid, 10, [11, 4,", "[0.3705316303566195, 0.1755951969700656, 0.043989590834687294, 0.22866693087969006, 0.1812166509589377]) grid = sin(grid, 4, 2,", "= transit(grid, 5, [9, 13, 3, 14], [0.28064413535886806, 0.5181512474389621, 0.1504742947642479,", "magnitude(grid, 9, [12, 14, 4], 2) grid = shift(grid, 3,", "res = x.copy() res[:,:,t_indx] = np.sin(x[:,:,s_indx] * 0.5 * np.pi", "7, 3.7705302330112063, 56.91558505626969) grid = sin(grid, 3, 9, 1.4275963527158242, -76.78247379244436)", "[12, 11, 13, 4], [0.1713900685471786, 0.14082681623065177, 0.19859698568682838, 0.4891861295353413]) grid =", "0.08164480089591167, 0.24583958083442445, 0.2244518823086713, 0.13799633398542827]) grid = transit(grid, 11, [0], [1.0])", "10, 14], [0.20381942291270427, 0.07753380798970702, 0.11445683149439734, 0.08475226158626031, 0.1416941580568898, 0.020968563089492034, 0.0847896752697893, 0.0921589665387646,", "0.08978424496428203]) grid = smooth_min(grid, 9, 9, 10) grid = shift(grid,", "-1.1449289879251126) grid = 
transit(grid, 7, [4, 10, 1, 13, 5,", "0.0921589665387646, 0.008240731277180186, 0.17158558178481512]) grid = transit(grid, 5, [11, 10], [0.9817011300708863,", "grid = sin(grid, 10, 14, 0.8649185298731181, 3.1973516320924773) grid = sin(grid,", "= sin(grid, 4, 10, -3.680544885171134, 30.633332441673872) grid = transit(grid, 11,", "3], [0.9172074355564371, 0.08279256444356292]) grid = transit(grid, 13, [1, 2, 7,", "[0.03500911832175082, 0.03265868671024263, 0.3248025339288217, 0.4234363710484886, 0.13338109758306646, 0.050712192407629864]) grid = transit(grid, 7,", "14, 8], [0.38986786543390084, 0.40057743619803005, 0.20955469836806906]) grid = transit(grid, 9, [5],", "11, 15, 4, 1, 0, 14], [0.29712982335534416, 0.2526657169525107, 0.08415696601637544, 0.18541009701166816,", "14], [0.7201753385758813, 0.2798246614241187]) grid = prod(grid, 4, [10, 0, 2,", "= sin(grid, 4, 3, 0.10154488887533689, 12.479110491961137) grid = magnitude(grid, 1,", "0.16903312106740406, 0.3387613981701764, 0.11303295854369695, 0.13714679001436697]) grid = transit(grid, 4, [14, 11,", "grid = sin(grid, 4, 15, -1.9527829039221054, 20.537776250912316) grid = transit(grid,", "0.05550396325806974, 0.1242259093715456]) grid = smooth_max(grid, 10, 15, 10) grid =", "* -0.5303146721156469 + y * -0.41048419195488317) / 2 grid[:,:,13] =", "= x.copy() res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis = -1, ord =", "if np.isnan(arr).any(): raise Exception('Array has None elements!') if np.amin(arr) <", "0.07412413198773361, 0.14949729304492473]) grid = magnitude(grid, 10, [11, 0, 5], 2)", "0.6296650407181701]) grid = smooth_min(grid, 4, 1, 8) grid = sin(grid,", "= smooth_min(grid, 11, 8, 12) grid = transit(grid, 1, [1,", "0.024264739611302585, 0.0306940545567164, 0.19611241111174804, 0.7173417059926683]) grid = transit(grid, 0, [7, 1,", "7, 15], 2) grid = sin(grid, 12, 7, 1.439019575760617, 13.126437741104823)", "13, 3, 8) grid = transit(grid, 13, [13, 0, 5,", "8, 3.386521226555936, 60.95572898751007) 
grid = shift(grid, 14, 2, 2.55681173849493) grid", "np.abs((1 - (x[:,:,s_indx] + 1) / 2) ** (1 -", "magnitude(grid, 13, [7, 4, 15], 2) grid = transit(grid, 13,", "transit(grid, 1, [12, 8, 10, 4, 2], [0.43102537693091664, 0.25433300797798253, 0.21618454566402304,", "= sin(grid, 7, 15, -4.9164570678736865, 86.15931416043557) grid = sin(grid, 1,", "space def shift_colors(x, shift): res = x.copy() for i in", "transit(grid, 8, [6, 2], [0.6857167761482571, 0.31428322385174284]) grid = shift(grid, 6,", "transit(grid, 6, [15], [1.0]) grid = sin(grid, 15, 0, -0.033265790773207085,", "= 16 def test_values(arr): if np.isnan(arr).any(): raise Exception('Array has None", "0) grid = magnitude(grid, 13, [8], 2) grid = transit(grid,", "transit(grid, 13, [5, 15, 10], [0.13237609957996088, 0.22944646977966682, 0.6381774306403722]) grid =", "# This program was generated by \"Generative Art Synthesizer\" #", "0.15211061797378253, 0.016462544099609754, 0.0072484377164178625, 0.4477791048998878, 0.11849249751317383]) grid = transit(grid, 10, [5,", "8, [2, 11, 15, 4, 1, 0, 14], [0.29712982335534416, 0.2526657169525107,", "smooth_min(grid, 4, 1, 8) grid = sin(grid, 4, 4, 3.47544933993972,", "13, [15, 5, 9, 4, 6, 12], [0.18067242214638962, 0.12939497982917472, 0.08164480089591167,", "3, -0.1377650382373763, -96.34412250071645) grid = sin(grid, 7, 3, 1.6405444007982959, -37.09230830685477)", "transit(grid, 8, [11, 15, 0], [0.08195235243098883, 0.6796005904358621, 0.23844705713314918]) grid =", "grid = transit(grid, 11, [12], [1.0]) grid = power(grid, 3,", "[0.03047869593495055, 0.024092687676923453, 0.02665655056773558, 0.17667886361751853, 0.15211061797378253, 0.016462544099609754, 0.0072484377164178625, 0.4477791048998878, 0.11849249751317383]) grid", "transit(grid, 8, [3, 15, 9, 6, 11], [0.036102265915692405, 0.1224495166624379, 0.2384660328868578,", "13, [6, 2, 3, 15, 5, 7], [0.06492287400539203, 0.21223490901058306, 0.36311130408652753,", "grid[:,:,6] = (x * 0.2265055481768512 + y * 
0.4365452266748293) /", "s2_indx, p = 10): res = x.copy() res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx]", "shift) - 1) ** (1 / (1 - shift)) *", "y * -0.7671554143072785) / 2 #apply transformations to the grid", "sin(grid, 4, 8, 3.386521226555936, 60.95572898751007) grid = shift(grid, 14, 2,", "12, 4, -1.6398586072056767, 84.51374680259704) grid = sin(grid, 1, 1, -0.183401440709518,", "3, -2.5681840787633137, -30.256455817944243) grid = sin(grid, 8, 2, 3.501615294498545, -75.50049353340206)", "= inverse(grid, 7, 8) grid = smooth_max(grid, 10, 3, 15)", "grid = sin(grid, 12, 4, -1.6398586072056767, 84.51374680259704) grid = sin(grid,", "* p)) ** (1/p)) / 1.07 return test_values(res) def smooth_min(x,", "grid = transit(grid, 4, [8, 4, 15, 9, 10], [0.10267794314653868,", "= transit(grid, 3, [6, 14, 0, 3, 15, 4, 2,", "[4, 6, 1, 0], 2) grid = transit(grid, 13, [11,", "= transit(grid, 2, [12], [1.0]) grid = prod(grid, 14, [11,", "= transit(grid, 11, [2], [1.0]) #create color space def shift_colors(x,", "(x * 0.5049774961793401 + y * 0.05113255120007798) / 2 grid[:,:,8]", "= sin(grid, 15, 0, -0.033265790773207085, 51.94880270063618) grid = smooth_min(grid, 13,", "09:20:21 UTC # GAS md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad # Python version:", "For more information visit: https://github.com/volotat/GAS #import python libraries import os", "[0, 15, 10], [0.005204838856346087, 0.5116602651328436, 0.48313489601081044]) grid = transit(grid, 10,", "9, 2], [0.5001532946669459, 0.42070604285213226, 0.07914066248092186]) grid = inverse(grid, 5, 12)", "[14, 11, 12, 13, 4, 7], [0.23221079251346607, 0.3307147367708056, 0.26199556841553734, 0.018127231672754242,", "10, [5, 11, 15, 8, 2, 13, 12, 3, 6],", "0.4951725812299663]) grid = sin(grid, 4, 8, 3.386521226555936, 60.95572898751007) grid =", "= j % 4 y = j // 4 img[x*SIZE:(x", "-0.3867357840809138) / 2 grid[:,:,15] = (x * 0.49037959172682255 + y", "t_indx, s_indx): res = x.copy() res[:,:,t_indx] = np.prod(x[:,:,s_indx], -1) 
return", "3, [15, 11, 2, 8, 0], [0.28772794692354614, 0.1935939805514465, 0.06024872230823076, 0.13457223936247906,", "y * -0.41048419195488317) / 2 grid[:,:,13] = (x * -0.5864100240508576", "sin(grid, 12, 14, 1.097917736937588, 58.87772371184383) grid = transit(grid, 11, [9,", "inverse(grid, 1, 0) grid = smooth_max(grid, 1, 15, 12) grid", "grid = transit(grid, 8, [3], [1.0]) grid = inverse(grid, 8,", "transit(grid, 3, [15, 11, 2, 8, 0], [0.28772794692354614, 0.1935939805514465, 0.06024872230823076,", "transit(grid, 8, [2, 11, 15, 4, 1, 0, 14], [0.29712982335534416,", "generated by \"Generative Art Synthesizer\" # Generation date: 2021-11-28 09:21:40", "/ 2 * 255).clip(0,255) #save results im = Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__)", "[11, 0], [0.6569516962992897, 0.3430483037007103]) grid = sin(grid, 14, 5, 0.053526366336325744,", "9, 12], [0.21908823570589997, 0.1636179110868493, 0.03797238284324163, 0.29532957711092916, 0.2839918932530799]) grid = sin(grid,", "[1.0]) grid = sin(grid, 15, 0, -0.033265790773207085, 51.94880270063618) grid =", "2, 6, 1, 4, 0], [0.2070905138265326, 0.06562120796792839, 0.17355051228662716, 0.05514926535269553, 0.0829726599151083,", "= sin(grid, 10, 0, 0.5112825397666086, 37.95950546335726) grid = sin(grid, 12,", "[9, 11], [0.37033495928182997, 0.6296650407181701]) grid = smooth_min(grid, 4, 1, 8)", "= x.copy() for i in range(x.shape[-1]): if shift[i] > 0:", "= smooth_min(grid, 7, 12, 0) grid = transit(grid, 2, [1,", "(1 + shift)) + 1) * 2 - 1 if", "smooth_max(grid, 8, 11, 15) grid = sin(grid, 12, 6, -3.621533174445339,", "11, 13, -6.909579361872105, 70.84834564082374) grid = transit(grid, 2, [11, 7,", "grid = sin(grid, 6, 3, -0.1377650382373763, -96.34412250071645) grid = sin(grid,", "0.11849249751317383]) grid = transit(grid, 10, [5, 11, 15, 8, 2,", "grid = inverse(grid, 5, 12) grid = sin(grid, 10, 2,", "inverse(grid, 5, 12) grid = sin(grid, 10, 2, 0.9155140652310594, -34.1653400637653)", "9, 2]) grid = 
sin(grid, 5, 1, 2.0751861425380627, 63.37681521624819) grid", "0, 12, 3]) grid = transit(grid, 13, [5, 15, 10],", "15, 5], 2) grid = magnitude(grid, 9, [12, 14, 4],", "= sin(grid, 12, 6, -3.621533174445339, 24.02414911462421) grid = sin(grid, 1,", "= (x * -0.8484277738516293 + y * -0.5155435342135386) / 2", "[2, 4, 13]) grid = transit(grid, 5, [1, 9, 3,", "-1.5301674594368837, -60.29431568717391) grid = transit(grid, 2, [13, 11, 5], [0.421270391024163,", "13]) grid = transit(grid, 5, [1, 9, 3, 10, 4],", "3, 9, 5], [0.24039798004748805, 0.2886075990223525, 0.18742374307846998, 0.11615833154358073, 0.16741234630810867]) grid =", "2, [7], [1.0]) grid = sin(grid, 10, 9, 6.219381309190064, -71.03631884776823)", "6, 1, 4, 0], [0.2070905138265326, 0.06562120796792839, 0.17355051228662716, 0.05514926535269553, 0.0829726599151083, 0.41561584065110807])", "8, [11, 15, 0], [0.08195235243098883, 0.6796005904358621, 0.23844705713314918]) grid = power(grid,", "7, 11, 10, 15, 0, 5]) grid = transit(grid, 11,", "= transit(grid, 2, [7], [1.0]) grid = sin(grid, 10, 9,", "test_values(res) def shift(x, t_indx, s_indx, shift): res = x.copy() if", "5, -5.225820110717917, 57.71107021356826) grid = transit(grid, 2, [12], [1.0]) grid", "4, 10, -3.680544885171134, 30.633332441673872) grid = transit(grid, 11, [12, 6,", "2) grid = sin(grid, 9, 5, -5.606152225672729, -35.928477282758536) grid =", "0.09214420047882232]) grid = smooth_max(grid, 1, 0, 1) grid = sin(grid,", "grid = transit(grid, 5, [11, 4, 2, 1, 13, 12,", "4], 2) grid = shift(grid, 3, 9, 3.0393348894939773) grid =", "6, 1.576100090732909, -21.508000199215132) grid = shift(grid, 11, 5, 1.0526879494498724) grid", "grid = sin(grid, 12, 10, 3.6427863324838423, 99.297524709649) grid = sin(grid,", "grid[:,:,1] = (x * 0.8090860808441245 + y * 0.2914526739617249) /", "[1.0]) grid = transit(grid, 15, [15], [1.0]) grid = prod(grid,", "3, -3.561651028660104, 11.539889679902203) grid = power(grid, 10, 5, 0.12539493928522222) grid", "= 
prod(grid, 15, [3, 5, 0, 1]) grid = sin(grid,", "0.5492744322205282, 35.873568370773654) grid = transit(grid, 7, [13], [1.0]) grid =", "4, [1, 12, 15, 13, 3], [0.32356965941479515, 0.022696478437764827, 0.2132573540073865, 0.11957266769813353,", "= x.copy() res[:,:,t_indx] = np.sin(x[:,:,s_indx] * 0.5 * np.pi *", "grid = smooth_max(grid, 8, 10, 6) grid = prod(grid, 3,", "(-np.abs(((x[:,:,i] + 1) / 2) ** (1 + shift[i]) -", "prod(grid, 14, [13]) grid = sin(grid, 1, 12, -0.5111321725063378, 18.261359970959475)", "'.png') #save layers img = np.zeros((SIZE * 4, SIZE *", "6, 7]) grid = transit(grid, 8, [3], [1.0]) grid =", "grid = transit(grid, 6, [15, 8], [0.5303803951305812, 0.4696196048694189]) grid =", "SIZE = 768 GRID_CHANNELS = 16 def test_values(arr): if np.isnan(arr).any():", "(x * 0.8435706697714382 + y * 0.7746597063144072) / 2 grid[:,:,12]", "[0.6857167761482571, 0.31428322385174284]) grid = shift(grid, 6, 15, 4.115946851379848) grid =", "magnitude(grid, 13, [2, 0], 2) grid = transit(grid, 13, [6,", "8, -0.8743741598911887, 15.92872484723533) grid = transit(grid, 4, [3, 13, 9,", "5, 1, 2.0751861425380627, 63.37681521624819) grid = smooth_min(grid, 11, 10, 9)", "transit(grid, 15, [7, 3], [0.9172074355564371, 0.08279256444356292]) grid = transit(grid, 13,", "grid = smooth_min(grid, 13, 10, 15) grid = transit(grid, 1,", "7, 4, 2], [0.39579476392315127, 0.3200094081197146, 0.06439062651950353, 0.03284446726347166, 0.04732779189481446, 0.13963294227934445]) grid", "0.003404332378773421, 0.04142944289977586, 0.4475027843592742]) grid = inverse(grid, 4, 5) grid =", "2 grid[:,:,5] = (x * -0.5986715486203882 + y * 0.9515468928881716)", "[0.7201753385758813, 0.2798246614241187]) grid = prod(grid, 4, [10, 0, 2, 4,", "= transit(grid, 14, [14, 13, 15], [0.530662002197574, 0.1082014600047566, 0.36113653779766947]) grid", "= 768 GRID_CHANNELS = 16 def test_values(arr): if np.isnan(arr).any(): raise", "= magnitude(grid, 11, [13, 10, 12, 2, 11, 14], 2)", "4, 2], 
[0.43102537693091664, 0.25433300797798253, 0.21618454566402304, 0.046743011673522995, 0.05171405775355483]) grid = sin(grid,", "-x[:,:,s_indx] return test_values(res) def smooth_max(x, t_indx, s1_indx, s2_indx, p =", "grid = sin(grid, 12, 7, 1.439019575760617, 13.126437741104823) grid = transit(grid,", "15, [12, 15]) grid = prod(grid, 8, [11, 7, 4,", "-3.680544885171134, 30.633332441673872) grid = transit(grid, 11, [12, 6, 9], [0.1597221050818672,", "= j // 4 img[x*SIZE:(x + 1)*SIZE, y*SIZE:(y+1)*SIZE] = grid[:,:,j]", "3, [11, 1, 12, 9, 0, 8, 15, 2, 10,", "(1 + shift) - 1) ** (1 / (1 +", "= x.copy() res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx] * p) + np.exp(x[:,:,s2_indx] *", "5) grid = magnitude(grid, 14, [4, 6, 1, 0], 2)", "sin(grid, 2, 5, -5.225820110717917, 57.71107021356826) grid = transit(grid, 2, [12],", "8, 7) grid = prod(grid, 10, [5, 2]) grid =", "2, 7], [0.45073658968521574, 0.16060948991238613, 0.12949271785123345, 0.2591612025511646]) grid = transit(grid, 10,", "4, 9, 0.2366252211469413, -40.63773874328931) grid = sin(grid, 9, 15, -2.507870105026106,", "4, 7], [0.23221079251346607, 0.3307147367708056, 0.26199556841553734, 0.018127231672754242, 0.13788777275073352, 0.01906389787670339]) grid =", "13) grid = magnitude(grid, 5, [7], 2) grid = transit(grid,", "= smooth_min(grid, 0, 5, 1) grid = magnitude(grid, 0, [5,", "x.copy() res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx] * p) + np.exp(x[:,:,s2_indx] * p))", "0.20047556686480725, 0.09921434949484752, 0.05399039482501285]) grid = transit(grid, 9, [5], [1.0]) grid", "= transit(grid, 10, [11, 4, 2, 8, 14], [0.3705316303566195, 0.1755951969700656,", "3.47544933993972, -37.11795195118333) grid = sin(grid, 11, 7, -0.3409112713023047, 75.93313567333723) grid", "= 1): res = x.copy() res[:,:,t_indx] = np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx])", "14]) grid = prod(grid, 9, [10, 11, 8, 15, 0,", "x.copy() res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx] * p) + np.exp(-x[:,:,s2_indx] * p))", "2) grid = 
sin(grid, 4, 5, -1.8457292172108153, -53.43885199947502) grid =", "5.409920766787869, -58.09956716630187) grid = sin(grid, 2, 15, -2.5319898824657017, -45.01904701883333) grid", "0.31981537589964426]) grid = sin(grid, 10, 3, -2.5681840787633137, -30.256455817944243) grid =", "0.15869932715876592, 0.14757838472737334]) grid = transit(grid, 2, [1, 7], [0.18247956114317448, 0.8175204388568255])", "[3, 11, 1]) grid = smooth_min(grid, 3, 2, 7) grid", "0.068511863728177, 0.10141059844877331, 0.2728285912351676, 0.036877533709020166]) grid = transit(grid, 7, [11], [1.0])", "grid = sin(grid, 5, 2, -2.2972705471452146, -12.522748365129786) grid = smooth_min(grid,", "transit(grid, 9, [5], [1.0]) grid = shift(grid, 9, 13, -5.367438086043798)", "-1.8457292172108153, -53.43885199947502) grid = sin(grid, 10, 0, 7.741409383532979, -12.082110529508299) grid", "prod(grid, 0, [0, 1, 2, 14]) grid = prod(grid, 9,", "1, 15, 12) grid = prod(grid, 11, [3]) grid =", "8, 0], [0.28772794692354614, 0.1935939805514465, 0.06024872230823076, 0.13457223936247906, 0.32385711085429764]) grid = transit(grid,", "grid = sin(grid, 8, 10, 2.5947698108630664, -90.74050288622541) grid = sin(grid,", "13, 4], [0.1713900685471786, 0.14082681623065177, 0.19859698568682838, 0.4891861295353413]) grid = transit(grid, 13,", "12, 13, 4, 7], [0.23221079251346607, 0.3307147367708056, 0.26199556841553734, 0.018127231672754242, 0.13788777275073352, 0.01906389787670339])", "-1 or np.amax(arr) > 1: raise Exception('Values went to far!", "0, 8], [0.08486049729383285, 0.15069099224942706, 0.024923245737924458, 0.07191051851248272, 0.25942601829807205, 0.16834508849259286, 0.14540219911263502, 0.094441440303033])", "grid = smooth_min(grid, 0, 13, 15) grid = smooth_max(grid, 5,", "[9, 8], [0.7777441717493406, 0.22225582825065934]) grid = transit(grid, 3, [9], [1.0])", "10) grid = shift(grid, 6, 1, -1.115193397983063) grid = smooth_max(grid,", "range(GRID_CHANNELS): x = j % 4 y = j //", "= transit(grid, 9, [8], [1.0]) grid 
= sin(grid, 5, 10,", "2): res = x.copy() res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis = -1,", "+ y * -0.5155435342135386) / 2 grid[:,:,4] = (x *", "3, 3, 2.4622222565241207) grid = sin(grid, 10, 0, 0.5112825397666086, 37.95950546335726)", "3, -0.15800274281797377, 90.63950889076133) grid = sin(grid, 14, 14, -1.842523240371888, 74.23947694195837)", "= sin(grid, 6, 14, -1.927951619591129, -65.3028706482776) grid = prod(grid, 14,", "= prod(grid, 3, [2, 6, 10, 7, 4]) grid =", "grid = transit(grid, 7, [11], [1.0]) grid = transit(grid, 5,", "= transit(grid, 13, [6, 15, 11, 9, 12], [0.21908823570589997, 0.1636179110868493,", "1.4275963527158242, -76.78247379244436) grid = sin(grid, 2, 5, -5.225820110717917, 57.71107021356826) grid", "information visit: https://github.com/volotat/GAS #import python libraries import os #OS version:", "0.05171405775355483]) grid = sin(grid, 10, 10, 0.9558311639914843, -47.618914508652054) grid =", "0.6991102523904072]) grid = transit(grid, 8, [2, 11, 15, 4, 1,", "= shift(grid, 11, 5, 1.0526879494498724) grid = transit(grid, 1, [14],", "0, 5, 14], [0.09662806703796267, 0.1621478194912538, 0.21548762580464817, 0.5257364876661353]) grid = inverse(grid,", "= transit(grid, 0, [4, 3, 8], [0.23275058190778222, 0.49901982570530873, 0.2682295923869092]) grid", "sin(grid, 3, 9, 1.4275963527158242, -76.78247379244436) grid = sin(grid, 2, 5,", "res += shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669, 1.4677093499726706, 1.2451388311186942]) res = res", "0.17158558178481512]) grid = transit(grid, 5, [11, 10], [0.9817011300708863, 0.018298869929113594]) grid", "[12, 13]) grid = sin(grid, 6, 14, -1.927951619591129, -65.3028706482776) grid", "res = x.copy() res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx] * p) + np.exp(-x[:,:,s2_indx]", "-0.6817079327248272) / 2 grid[:,:,11] = (x * 0.8435706697714382 + y", "5, 5) grid = transit(grid, 4, [8, 4, 15, 9,", "= sin(grid, 6, 11, -0.7697482296056479, 23.55348445076298) grid = sin(grid, 7,", 
"1, 1, -0.183401440709518, -88.40242580975152) grid = transit(grid, 12, [3, 13,", "= prod(grid, 2, [3, 11, 1]) grid = smooth_min(grid, 3,", "shift(grid, 9, 8, -1.1449289879251126) grid = transit(grid, 7, [4, 10,", "#create color space def shift_colors(x, shift): res = x.copy() for", "# Python version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC", "9) grid = sin(grid, 13, 2, 4.295107938126156, 57.378601701270014) grid =", "0, 1, 5], [0.18796556626817826, 0.19260744772691155, 0.11226112831146452, 0.08161640805634696, 0.08706050582840198, 0.2243337708440404, 0.11415517296465624])", "grid = magnitude(grid, 0, [4, 13], 2) grid = transit(grid,", "= transit(grid, 2, [0, 15, 10], [0.005204838856346087, 0.5116602651328436, 0.48313489601081044]) grid", "test_values(res) def inverse(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx] =", "transit(grid, 5, [11, 4, 2, 1, 13, 12, 0, 8],", "11, 5) grid = magnitude(grid, 14, [4, 6, 1, 0],", "np.amax(arr) > 1: raise Exception('Values went to far! 
[ %.2f", "grid grid = np.zeros((SIZE, SIZE, GRID_CHANNELS)) x = ((np.arange(SIZE)/(SIZE-1) -", "res = x.copy() if shift > 0: res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx]", "+ 1) / 2) ** (1 + shift[i]) - 1)", "grid = magnitude(grid, 13, [7, 4, 15], 2) grid =", "* -0.7665883618456049 + y * -0.3867357840809138) / 2 grid[:,:,15] =", "4], [0.03047869593495055, 0.024092687676923453, 0.02665655056773558, 0.17667886361751853, 0.15211061797378253, 0.016462544099609754, 0.0072484377164178625, 0.4477791048998878, 0.11849249751317383])", "grid = smooth_min(grid, 9, 9, 10) grid = shift(grid, 8,", "2021-11-28 09:20:21 UTC # GAS md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad # Python", "5, 0.12539493928522222) grid = power(grid, 0, 12, 2.5526439221510495) grid =", "grid = transit(grid, 11, [9, 11], [0.37033495928182997, 0.6296650407181701]) grid =", "7, 13], [0.3629247592109436, 0.10073172896374764, 0.5363435118253088]) grid = sin(grid, 1, 5,", "0.15069099224942706, 0.024923245737924458, 0.07191051851248272, 0.25942601829807205, 0.16834508849259286, 0.14540219911263502, 0.094441440303033]) grid = transit(grid,", "2, 4, 2.1961962516242517) grid = prod(grid, 15, [3, 5, 0,", "24.900059771988836) grid = sin(grid, 8, 10, 2.5947698108630664, -90.74050288622541) grid =", "0.8649185298731181, 3.1973516320924773) grid = sin(grid, 9, 7, -2.4657577404884132, 72.95418196004374) grid", "0.5 * np.pi * scale + shift) return test_values(res) def", "= transit(grid, 13, [12, 15, 9, 2, 0, 1, 5],", "[0.05731677054419865, 0.08527765171582982, 0.33929504571762287, 0.1932983536368378, 0.0036374435750729187, 0.12289545051895708, 0.19827928429148084]) grid = transit(grid,", "grid = sin(grid, 4, 2, -3.329894296119046, -76.41676919069447) grid = smooth_min(grid,", "0.26199556841553734, 0.018127231672754242, 0.13788777275073352, 0.01906389787670339]) grid = sin(grid, 4, 7, 3.7705302330112063,", "[0.29345909580747953, 0.7065409041925205]) grid = sin(grid, 12, 4, -1.6398586072056767, 84.51374680259704) grid", 
"magnitude(grid, 5, [7], 2) grid = transit(grid, 6, [9, 11,", "0.04613309733662021, 0.19750663742298355, 0.16072124228620793, 0.15869932715876592, 0.14757838472737334]) grid = transit(grid, 2, [1,", "0.9515468928881716) / 2 grid[:,:,6] = (x * 0.2265055481768512 + y", "11, 15, 8, 2, 13, 12, 3, 6], [0.1020239434902293, 0.05405846145210329,", "grid = transit(grid, 7, [13], [1.0]) grid = sin(grid, 3,", "2.5526439221510495) grid = sin(grid, 4, 10, -3.680544885171134, 30.633332441673872) grid =", "np.abs(x[:,:,s_indx]) ** p return test_values(res) #set initial grid grid =", "-3.561651028660104, 11.539889679902203) grid = power(grid, 10, 5, 0.12539493928522222) grid =", "4, 2, -3.329894296119046, -76.41676919069447) grid = smooth_min(grid, 11, 8, 12)", "2) grid = transit(grid, 12, [8, 11, 3], [0.2717231795161624, 0.38648847983305307,", "15, [5, 3, 8, 0, 15], 2) grid = prod(grid,", "1]) grid = sin(grid, 6, 11, -0.7697482296056479, 23.55348445076298) grid =", "0.13457223936247906, 0.32385711085429764]) grid = transit(grid, 1, [7, 2, 6, 1,", "0.4150710938376613]) grid = sin(grid, 3, 11, -6.496603906160505, -73.75617586359363) grid =", "= shift(grid, 8, 1, -0.2952350240798842) grid = sin(grid, 11, 6,", "1, -0.2952350240798842) grid = sin(grid, 11, 6, 1.576100090732909, -21.508000199215132) grid", "1], [0.05863158300898051, 0.3467981515651057, 0.262107802795733, 0.038001653167336905, 0.2112967596903696, 0.002128256606899112, 0.08103579316557531]) grid =", "1], [0.20378471182464508, 0.038241020379710625, 0.16903312106740406, 0.3387613981701764, 0.11303295854369695, 0.13714679001436697]) grid = transit(grid,", "= inverse(grid, 6, 6) grid = sin(grid, 7, 15, -4.9164570678736865,", "sin(grid, 8, 2, 3.501615294498545, -75.50049353340206) grid = prod(grid, 9, [1,", "python libraries import os #OS version: default import numpy as", "65.36615977552518) grid = transit(grid, 0, [14, 3, 11, 10, 7],", "-0.5986715486203882 + y * 0.9515468928881716) / 2 grid[:,:,6] = (x", "sin(grid, 4, 8, 
4.28026157040775, -75.14180284322572) grid = prod(grid, 3, [14,", "grid = inverse(grid, 8, 7) grid = prod(grid, 10, [5,", "smooth_min(grid, 12, 9, 11) grid = sin(grid, 4, 15, -1.9527829039221054,", "[0.28064413535886806, 0.5181512474389621, 0.1504742947642479, 0.050730322437922]) grid = prod(grid, 1, [12, 13])", "prod(grid, 13, [6, 3, 7]) grid = sin(grid, 0, 3,", "def sin(x, t_indx, s_indx, scale = 1, shift = 0):", "** (1 / (1 - shift)) * 2 - 1", "0, 13], [0.2785496566747933, 0.004915230889640017, 0.30146401859790545, 0.4150710938376613]) grid = sin(grid, 3,", "0.15974656746239488, 0.027776085211312595, 0.02330072841260748, 0.20156117996836745]) grid = smooth_min(grid, 0, 5, 1)", "= transit(grid, 15, [15], [1.0]) grid = prod(grid, 13, [6,", "= 10): res = x.copy() res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx] * p)", "grid = smooth_max(grid, 10, 5, 13) grid = sin(grid, 9,", "res[:,:,i] = (-np.abs(((x[:,:,i] + 1) / 2) ** (1 +", "99.297524709649) grid = sin(grid, 5, 14, -1.45141083652418, -99.85812912291547) grid =", "transit(grid, 11, [0, 9], [0.1290607634325389, 0.8709392365674611]) grid = transit(grid, 14,", "9, 3.0393348894939773) grid = shift(grid, 2, 4, 2.1961962516242517) grid =", "grid = transit(grid, 2, [9, 11, 10], [0.2662646690994658, 0.2460545507972383, 0.4876807801032959])", "15], 2) grid = sin(grid, 12, 7, 1.439019575760617, 13.126437741104823) grid", "p) + np.exp(-x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return", "[6, 14], [0.7201753385758813, 0.2798246614241187]) grid = prod(grid, 4, [10, 0,", "0.13869178538077429, 0.09948828746526778, 0.16686217850764798, 0.09926467338066268]) grid = transit(grid, 6, [6, 13,", "magnitude(grid, 7, [6, 12, 7, 13, 8], 2) grid =", "res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis = -1, ord = ord) /", "grid = sin(grid, 3, 12, -4.078686662791614, 24.459526349523884) grid = inverse(grid,", "transit(grid, 4, [4, 12, 14, 15, 7, 1], [0.20378471182464508, 0.038241020379710625,", "0.2547330105267852]) grid = inverse(grid, 11, 
5) grid = magnitude(grid, 14,", "0.7173417059926683]) grid = transit(grid, 0, [7, 1, 11, 0, 15],", "transit(grid, 11, [12], [1.0]) grid = power(grid, 3, 5, 0.10200689258338674)", "[0.1020239434902293, 0.05405846145210329, 0.11525379082942891, 0.11556721863292163, 0.12372657123165616, 0.1356897031789931, 0.20047556686480725, 0.09921434949484752, 0.05399039482501285]) grid", "= shift(grid, 6, 15, 4.115946851379848) grid = transit(grid, 15, [13,", "10, 3.6427863324838423, 99.297524709649) grid = sin(grid, 5, 14, -1.45141083652418, -99.85812912291547)", "= transit(grid, 10, [9, 8], [0.7777441717493406, 0.22225582825065934]) grid = transit(grid,", "grid = smooth_min(grid, 3, 2, 7) grid = smooth_max(grid, 8,", "5, 0, 7, 8, 9, 12, 6, 11, 14], [0.10006330804326793,", "14], [0.20381942291270427, 0.07753380798970702, 0.11445683149439734, 0.08475226158626031, 0.1416941580568898, 0.020968563089492034, 0.0847896752697893, 0.0921589665387646, 0.008240731277180186,", "= sin(grid, 4, 8, 3.386521226555936, 60.95572898751007) grid = shift(grid, 14,", "3, 8], [0.23275058190778222, 0.49901982570530873, 0.2682295923869092]) grid = magnitude(grid, 8, [10,", "(x * -0.4262457935185371 + y * -0.6817079327248272) / 2 grid[:,:,11]", "-1.927951619591129, -65.3028706482776) grid = prod(grid, 14, [13]) grid = sin(grid,", "grid = sin(grid, 4, 8, 4.28026157040775, -75.14180284322572) grid = prod(grid,", "grid = prod(grid, 0, [0, 1, 2, 14]) grid =", "transit(grid, 11, [12, 6, 9], [0.1597221050818672, 0.523275926379751, 0.31700196853838186]) grid =", "= transit(grid, 6, [6, 14], [0.7201753385758813, 0.2798246614241187]) grid = prod(grid,", "grid = magnitude(grid, 6, [14, 5, 13, 11, 2, 9],", "= smooth_max(grid, 13, 3, 8) grid = transit(grid, 13, [13,", "2 - 1 return test_values(res) def inverse(x, t_indx, s_indx): res", "[5, 10, 7], [0.22694849313985146, 0.5162695719847235, 0.25678193487542517]) grid = sin(grid, 9,", "grid = magnitude(grid, 5, [7], 2) grid = transit(grid, 6,", "[0.38986786543390084, 
0.40057743619803005, 0.20955469836806906]) grid = transit(grid, 9, [5], [1.0]) grid", "[1.0]) grid = sin(grid, 5, 10, -1.5052434957207308, 24.900059771988836) grid =", "58.87772371184383) grid = transit(grid, 11, [9, 11], [0.37033495928182997, 0.6296650407181701]) grid", "magnitude(grid, 9, [15, 7], 2) grid = transit(grid, 4, [4,", "12], [0.21908823570589997, 0.1636179110868493, 0.03797238284324163, 0.29532957711092916, 0.2839918932530799]) grid = sin(grid, 4,", "0.1756104206123629, 0.013987480750913602, 0.1337935702206657, 0.39097327478734406, 0.08978424496428203]) grid = smooth_min(grid, 9, 9,", "1 return test_values(res) def inverse(x, t_indx, s_indx): res = x.copy()", "0.3378766591098989, 0.15974656746239488, 0.027776085211312595, 0.02330072841260748, 0.20156117996836745]) grid = smooth_min(grid, 0, 5,", "grid = transit(grid, 14, [14, 13, 15], [0.530662002197574, 0.1082014600047566, 0.36113653779766947])", "7, [14, 2, 13, 1, 11, 3, 8, 7], [0.207462236904601,", "[9, 11, 10], [0.2662646690994658, 0.2460545507972383, 0.4876807801032959]) grid = transit(grid, 2,", "grid = inverse(grid, 6, 6) grid = sin(grid, 7, 15,", "#PIL version: 8.1.2 #set initial params SIZE = 768 GRID_CHANNELS", "Python version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900", "= transit(grid, 13, [15, 5, 9, 4, 6, 12], [0.18067242214638962,", "[0.06492287400539203, 0.21223490901058306, 0.36311130408652753, 0.09994467226348329, 0.12833432959710458, 0.1314519110369097]) grid = transit(grid, 8,", "0.040818441056887325, 0.16796111771248814, 0.07628940657007711]) grid = transit(grid, 3, [11, 1, 12,", "9, 10) grid = shift(grid, 8, 1, -0.2952350240798842) grid =", "= smooth_min(grid, 11, 10, 9) grid = sin(grid, 13, 2,", "= sin(grid, 10, 2, -0.010214061334835559, 20.43114218394348) grid = transit(grid, 8,", "9, 2.766857264282361) grid = transit(grid, 3, [6, 14, 0, 3,", "0.21548762580464817, 0.5257364876661353]) grid = inverse(grid, 1, 0) grid = smooth_max(grid,", "72.95418196004374) 
grid = transit(grid, 12, [7, 4, 10, 5], [0.5076634403621766,", "grid = sin(grid, 4, 5, -1.8457292172108153, -53.43885199947502) grid = sin(grid,", "transit(grid, 2, [13, 11, 5], [0.421270391024163, 0.5054038923567993, 0.07332571661903758]) grid =", "= prod(grid, 14, [11, 10]) grid = transit(grid, 2, [0,", "grid = sin(grid, 14, 14, -1.842523240371888, 74.23947694195837) grid = inverse(grid,", "+ y * -0.9425245660964123) / 2 grid[:,:,14] = (x *", "0.4828116495090197, 0.09546712897709621, 0.15569177834549294]) grid = sin(grid, 6, 3, -0.1377650382373763, -96.34412250071645)", "= sin(grid, 10, 10, 0.9558311639914843, -47.618914508652054) grid = shift(grid, 9,", "* alphas, axis = -1) return test_values(res.clip(-1,1)) def sin(x, t_indx,", "magnitude(grid, 15, [5, 3, 8, 0, 15], 2) grid =", "0.16072124228620793, 0.15869932715876592, 0.14757838472737334]) grid = transit(grid, 2, [1, 7], [0.18247956114317448,", "0.25942601829807205, 0.16834508849259286, 0.14540219911263502, 0.094441440303033]) grid = transit(grid, 11, [12], [1.0])", "md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad # Python version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17", "smooth_min(grid, 13, 10, 15) grid = transit(grid, 1, [12, 8,", "GAS md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad # Python version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug", "return test_values(res) def smooth_min(x, t_indx, s1_indx, s2_indx, p = 10):", "grid = sin(grid, 6, 14, -1.927951619591129, -65.3028706482776) grid = prod(grid,", "2) grid = transit(grid, 13, [15, 5, 9, 4, 6,", "12, 15, 13, 3], [0.32356965941479515, 0.022696478437764827, 0.2132573540073865, 0.11957266769813353, 0.3209038404419199]) grid", "2.55681173849493) grid = sin(grid, 10, 14, 0.8649185298731181, 3.1973516320924773) grid =", "(1 / (1 - shift[i])) * 2 - 1 return", "0.018127231672754242, 0.13788777275073352, 0.01906389787670339]) grid = sin(grid, 4, 7, 3.7705302330112063, 56.91558505626969)", "= sin(grid, 1, 5, 8.18216846853571, -6.729427492311089) grid = magnitude(grid, 11,", 
"[0.13835365002720226, 0.008781149737259792, 0.24627334258742545, 0.04870190081124998, 0.049950480577274, 0.15123046752435387, 0.31255198044446264, 0.04415702829077187]) grid =", "11, 10, 9) grid = sin(grid, 13, 2, 4.295107938126156, 57.378601701270014)", "6, 5, 7, 4, 2], [0.39579476392315127, 0.3200094081197146, 0.06439062651950353, 0.03284446726347166, 0.04732779189481446,", "if shift < 0: res[:,:,t_indx] = np.abs((1 - (x[:,:,s_indx] +", "test_values(res) def smooth_min(x, t_indx, s1_indx, s2_indx, p = 10): res", "5, 14, -1.45141083652418, -99.85812912291547) grid = transit(grid, 0, [4, 3,", "13, 12, 3, 6], [0.1020239434902293, 0.05405846145210329, 0.11525379082942891, 0.11556721863292163, 0.12372657123165616, 0.1356897031789931,", "2, 0, 1, 5], [0.18796556626817826, 0.19260744772691155, 0.11226112831146452, 0.08161640805634696, 0.08706050582840198, 0.2243337708440404,", "-0.010214061334835559, 20.43114218394348) grid = transit(grid, 8, [1], [1.0]) grid =", "2, 7) grid = smooth_max(grid, 8, 10, 6) grid =", "grid = prod(grid, 13, [6, 3, 7]) grid = sin(grid,", "3]) grid = transit(grid, 13, [5, 15, 10], [0.13237609957996088, 0.22944646977966682,", "[0.08195235243098883, 0.6796005904358621, 0.23844705713314918]) grid = power(grid, 14, 0, 0.10854801586669052) grid", "grid = prod(grid, 11, [3]) grid = smooth_max(grid, 8, 11,", "-3.621533174445339, 24.02414911462421) grid = sin(grid, 1, 11, 0.5071121900678415, 10.950101187785563) grid", "[0.421270391024163, 0.5054038923567993, 0.07332571661903758]) grid = transit(grid, 11, [1, 15, 5,", "0.2243337708440404, 0.11415517296465624]) grid = sin(grid, 11, 13, -6.909579361872105, 70.84834564082374) grid", "11, 15) grid = sin(grid, 12, 6, -3.621533174445339, 24.02414911462421) grid", "0.07376266150860299, 0.03622483092076182, 0.09070212266434277, 0.4030414045204916, 0.07629394446370606]) grid = magnitude(grid, 13, [7,", "0.0036374435750729187, 0.12289545051895708, 0.19827928429148084]) grid = transit(grid, 8, [13, 9, 5,", "9, 
[5], [1.0]) grid = shift(grid, 9, 13, -5.367438086043798) grid", "0.33341754503307897]) grid = transit(grid, 13, [10, 8, 9, 12, 2],", "transit(grid, 2, [10, 11, 4, 15, 0, 6], [0.24973877983541862, 0.3378766591098989,", "transit(grid, 12, [7, 4, 10, 5], [0.5076634403621766, 0.003404332378773421, 0.04142944289977586, 0.4475027843592742])", "6, 11, -0.7697482296056479, 23.55348445076298) grid = sin(grid, 7, 7, 0.5492744322205282,", ") return arr #define grid transformation methods def transit(x, t_indx,", "11, 14], [0.10006330804326793, 0.03891760159161208, 0.005474465860804227, 0.12962618248625338, 0.03090992138168193, 0.016043163973997736, 0.13259375374543056, 0.09920705802758992,", "9, 13, -5.367438086043798) grid = magnitude(grid, 13, [2, 0], 2)", "t_indx, s_indx): res = x.copy() res[:,:,t_indx] = -x[:,:,s_indx] return test_values(res)", "-40.63773874328931) grid = sin(grid, 9, 15, -2.507870105026106, -89.43842740853354) grid =", "11, 10, 7], [0.5203714128788618, 0.068511863728177, 0.10141059844877331, 0.2728285912351676, 0.036877533709020166]) grid =", "3, 2.4622222565241207) grid = sin(grid, 10, 0, 0.5112825397666086, 37.95950546335726) grid", "= smooth_min(grid, 1, 1, 11) grid = transit(grid, 5, [11,", "5, [11, 4, 2, 1, 13, 12, 0, 8], [0.08486049729383285,", "inverse(grid, 5, 5) grid = transit(grid, 4, [8, 4, 15,", "= sin(grid, 5, 14, -1.45141083652418, -99.85812912291547) grid = transit(grid, 0,", "-0.3409112713023047, 75.93313567333723) grid = transit(grid, 11, [5, 10, 7], [0.22694849313985146,", "0.2839918932530799]) grid = sin(grid, 4, 3, 2.634465399239887, 62.07538440217337) grid =", "6, 12], [0.18067242214638962, 0.12939497982917472, 0.08164480089591167, 0.24583958083442445, 0.2244518823086713, 0.13799633398542827]) grid =", "/ 2 grid[:,:,4] = (x * -0.6644350461377522 + y *", "** (1 - shift) - 1) ** (1 / (1", "+ shift) return test_values(res) def magnitude(x, t_indx, s_indx, ord =", "grid = sin(grid, 9, 5, -5.606152225672729, -35.928477282758536) grid = 
transit(grid,", "sin(grid, 4, 7, 3.7705302330112063, 56.91558505626969) grid = sin(grid, 3, 9,", "x.copy() for i in range(x.shape[-1]): if shift[i] > 0: res[:,:,i]", "[7, 4, 10, 5], [0.5076634403621766, 0.003404332378773421, 0.04142944289977586, 0.4475027843592742]) grid =", "11, 4, 15, 0, 6], [0.24973877983541862, 0.3378766591098989, 0.15974656746239488, 0.027776085211312595, 0.02330072841260748,", "10, [15, 8, 13, 2], [0.32464063956303774, 0.20922781529873477, 0.16179927966914437, 0.30433226546908315]) grid", "-1.45141083652418, -99.85812912291547) grid = transit(grid, 0, [4, 3, 8], [0.23275058190778222,", "= sin(grid, 11, 7, -0.3409112713023047, 75.93313567333723) grid = transit(grid, 11,", "-75.50049353340206) grid = prod(grid, 9, [1, 4, 0, 6]) grid", "0.07914066248092186]) grid = inverse(grid, 5, 12) grid = sin(grid, 10,", "grid[:,:,4] = (x * -0.6644350461377522 + y * 0.1739322518414499) /", "0) grid = transit(grid, 2, [1, 2], [0.9078557995211777, 0.09214420047882232]) grid", "0, -0.033265790773207085, 51.94880270063618) grid = smooth_min(grid, 13, 10, 15) grid", "grid = transit(grid, 8, [6, 2], [0.6857167761482571, 0.31428322385174284]) grid =", "0.11415517296465624]) grid = sin(grid, 11, 13, -6.909579361872105, 70.84834564082374) grid =", "75.93313567333723) grid = transit(grid, 11, [5, 10, 7], [0.22694849313985146, 0.5162695719847235,", "14], [0.09662806703796267, 0.1621478194912538, 0.21548762580464817, 0.5257364876661353]) grid = inverse(grid, 1, 0)", "0.31255198044446264, 0.04415702829077187]) grid = transit(grid, 1, [3], [1.0]) grid =", "[5, 2]) grid = transit(grid, 15, [0, 3], [0.29345909580747953, 0.7065409041925205])", "56.91558505626969) grid = sin(grid, 3, 9, 1.4275963527158242, -76.78247379244436) grid =", "0: res[:,:,i] = np.abs((1 - (x [:,:,i]+ 1) / 2)", "/ 2 grid[:,:,10] = (x * -0.4262457935185371 + y *", "sin(grid, 10, 3, -2.5681840787633137, -30.256455817944243) grid = sin(grid, 8, 2,", "0], 2) grid = transit(grid, 13, [6, 2, 3, 15,", "grid = 
transit(grid, 13, [12, 15, 9, 2, 0, 1,", "[0.03597236183123865, 0.04938629068404894, 0.08457069101219464, 0.014801187461296406, 0.3649334871683411, 0.28062233683539095, 0.08637063851194285, 0.06076815802338077, 0.022574848472165728]) grid", "[4, 12, 14, 15, 7, 1], [0.20378471182464508, 0.038241020379710625, 0.16903312106740406, 0.3387613981701764,", "8, -0.4693746108213766, -98.17810769380118) grid = sin(grid, 12, 10, 3.6427863324838423, 99.297524709649)", "10], [0.2662646690994658, 0.2460545507972383, 0.4876807801032959]) grid = transit(grid, 2, [7], [1.0])", "0.30433226546908315]) grid = magnitude(grid, 6, [14, 5, 13, 11, 2,", "8, [1], [1.0]) grid = sin(grid, 4, 9, 0.2366252211469413, -40.63773874328931)", "= transit(grid, 1, [8, 10, 15, 14, 9], [0.33493798319460544, 0.14040206011900094,", "[0.23701292672659616, 0.08316792464084911, 0.017867439461611043, 0.36417402420248035, 0.02841485585755143, 0.19916101840344472, 0.03422984110049058, 0.03597196960697647]) grid =", "2, 4, 8, 5, 6, 7]) grid = transit(grid, 8,", "[13, 11, 5], [0.421270391024163, 0.5054038923567993, 0.07332571661903758]) grid = transit(grid, 11,", "transit(grid, 8, [14], [1.0]) grid = transit(grid, 4, [1, 12,", "14, 0, 0.10854801586669052) grid = shift(grid, 8, 9, 2.766857264282361) grid", "= transit(grid, 11, [12, 6, 9], [0.1597221050818672, 0.523275926379751, 0.31700196853838186]) grid", "3, 5], [0.11084510086381213, 0.003439701966452383, 0.10819642722960272, 0.15371289739415475, 0.25812192912399506, 0.005727171643985687, 0.14633649245899077, 0.033890406689391105,", "= sin(grid, 4, 7, 3.7705302330112063, 56.91558505626969) grid = sin(grid, 3,", "= transit(grid, 0, [7, 11, 15, 8, 12, 0, 4,", "0.03891760159161208, 0.005474465860804227, 0.12962618248625338, 0.03090992138168193, 0.016043163973997736, 0.13259375374543056, 0.09920705802758992, 0.1415090600653345, 0.09597789664069131, 0.06106766497801195,", "8, 10, 2.5947698108630664, -90.74050288622541) grid = sin(grid, 9, 8, -0.8743741598911887,", 
"5, [9, 13, 3, 14], [0.28064413535886806, 0.5181512474389621, 0.1504742947642479, 0.050730322437922]) grid", "-97.13150019385894) grid = transit(grid, 11, [0, 9], [0.1290607634325389, 0.8709392365674611]) grid", "4, 2, 8, 14], [0.3705316303566195, 0.1755951969700656, 0.043989590834687294, 0.22866693087969006, 0.1812166509589377]) grid", "0, [12, 6, 4, 9, 1, 0, 14], [0.36336761526831185, 0.17372789204937897,", "4, 15, 6, 14, 0, 3, 11], [0.13835365002720226, 0.008781149737259792, 0.24627334258742545,", "0.13788777275073352, 0.01906389787670339]) grid = sin(grid, 4, 7, 3.7705302330112063, 56.91558505626969) grid", "8], [0.08486049729383285, 0.15069099224942706, 0.024923245737924458, 0.07191051851248272, 0.25942601829807205, 0.16834508849259286, 0.14540219911263502, 0.094441440303033]) grid", "= transit(grid, 11, [5, 10, 7], [0.22694849313985146, 0.5162695719847235, 0.25678193487542517]) grid", "img[x*SIZE:(x + 1)*SIZE, y*SIZE:(y+1)*SIZE] = grid[:,:,j] img = (img +", "= prod(grid, 4, [10, 0, 2, 4, 8, 5, 6,", "[10, 11, 8, 15, 0, 12, 3]) grid = transit(grid,", "0.1812166509589377]) grid = sin(grid, 4, 2, -3.329894296119046, -76.41676919069447) grid =", "smooth_min(grid, 7, 4, 13) grid = magnitude(grid, 5, [7], 2)", "grid = magnitude(grid, 13, [8], 2) grid = transit(grid, 13,", "smooth_min(x, t_indx, s1_indx, s2_indx, p = 10): res = x.copy()", "transit(x, t_indx, s_indx, alphas): res = x.copy() res[:,:,t_indx] = np.sum(x[:,:,s_indx]", "3, [7, 3, 12, 9], [0.13643904772292245, 0.38438336340747, 0.15936221296996333, 0.31981537589964426]) grid", "= power(grid, 6, 5, 0.9223892145169746) grid = transit(grid, 2, [9,", "[11, 1, 12, 9, 0, 8, 15, 2, 10, 14],", "grid = transit(grid, 7, [14, 2, 13, 1, 11, 3,", "15, 9, 10], [0.10267794314653868, 0.019022820046952493, 0.061606568183823145, 0.4832751235896067, 0.33341754503307897]) grid =", "8, 5, 2, 3], [0.23701292672659616, 0.08316792464084911, 0.017867439461611043, 0.36417402420248035, 0.02841485585755143, 0.19916101840344472,", 
"0.008288053054498123]) grid = prod(grid, 15, [12, 15]) grid = prod(grid,", "grid = transit(grid, 0, [14, 3, 11, 10, 7], [0.5203714128788618,", "84.51374680259704) grid = sin(grid, 1, 1, -0.183401440709518, -88.40242580975152) grid =", "* 2 - 1 if shift[i] < 0: res[:,:,i] =", "8.18216846853571, -6.729427492311089) grid = magnitude(grid, 11, [8, 2], 2) grid", "4, 2.1961962516242517) grid = prod(grid, 15, [3, 5, 0, 1])", "12, -4.078686662791614, 24.459526349523884) grid = inverse(grid, 15, 10) grid =", "** (1 + shift) - 1) ** (1 / (1", "(1 / (1 - shift)) * 2 - 1 return", "11, 0, 14], 2) grid = sin(grid, 4, 5, -1.8457292172108153,", "grid = shift(grid, 8, 9, 2.766857264282361) grid = transit(grid, 3,", "2, 11, 13], [0.03597236183123865, 0.04938629068404894, 0.08457069101219464, 0.014801187461296406, 0.3649334871683411, 0.28062233683539095, 0.08637063851194285,", "[1, 15, 5, 0, 6, 12, 2, 7, 4], [0.03047869593495055,", "* 0.4365452266748293) / 2 grid[:,:,7] = (x * 0.5049774961793401 +", "[0.5001532946669459, 0.42070604285213226, 0.07914066248092186]) grid = inverse(grid, 5, 12) grid =", "[0.24803411847529433, 0.2425397323068922, 0.0904752958055755, 0.11683555248582808, 0.30211530092641004]) grid = sin(grid, 5, 2,", "[11, 0, 5], 2) grid = magnitude(grid, 9, [15, 3,", "(x * -0.8484277738516293 + y * -0.5155435342135386) / 2 grid[:,:,4]", "sin(grid, 1, 11, 0.5071121900678415, 10.950101187785563) grid = shift(grid, 13, 3,", "* 0.8090860808441245 + y * 0.2914526739617249) / 2 grid[:,:,2] =", "grid[:,:,7] = (x * 0.5049774961793401 + y * 0.05113255120007798) /", "= transit(grid, 8, [1], [1.0]) grid = sin(grid, 4, 9,", "= magnitude(grid, 9, [15, 7], 2) grid = transit(grid, 4,", "5, 13) grid = sin(grid, 9, 10, -1.8565532127479274, -54.75186223635349) grid", "3.386521226555936, 60.95572898751007) grid = shift(grid, 14, 2, 2.55681173849493) grid =", "0.0829726599151083, 0.41561584065110807]) grid = transit(grid, 2, [0, 4, 2], [0.010597803396528332,", "0.13259375374543056, 
0.09920705802758992, 0.1415090600653345, 0.09597789664069131, 0.06106766497801195, 0.14032187015082653, 0.008288053054498123]) grid = prod(grid,", "0.13338109758306646, 0.050712192407629864]) grid = transit(grid, 7, [14, 2, 13, 1,", "* 4, SIZE * 4)) for j in range(GRID_CHANNELS): x", "14, [13]) grid = sin(grid, 1, 12, -0.5111321725063378, 18.261359970959475) grid", "arr #define grid transformation methods def transit(x, t_indx, s_indx, alphas):", "4, [14, 11, 12, 13, 4, 7], [0.23221079251346607, 0.3307147367708056, 0.26199556841553734,", "= smooth_max(grid, 8, 11, 15) grid = sin(grid, 12, 6,", "13, 8], 2) grid = transit(grid, 8, [3, 15, 9,", "9, 4, 3.0281102269529683, 11.185401112275173) grid = sin(grid, 10, 4, 1.2844464834351186,", "grid = transit(grid, 6, [15], [1.0]) grid = sin(grid, 15,", "0.013987480750913602, 0.1337935702206657, 0.39097327478734406, 0.08978424496428203]) grid = smooth_min(grid, 9, 9, 10)", "grid = smooth_max(grid, 2, 2, 11) grid = sin(grid, 13,", "5, 4, 15, 13], [0.1869735689344564, 0.06343641920215143, 0.038951322931441136, 0.04613309733662021, 0.19750663742298355, 0.16072124228620793,", "transit(grid, 1, [8, 10, 15, 14, 9], [0.33493798319460544, 0.14040206011900094, 0.3010385316537353,", "[0.24973877983541862, 0.3378766591098989, 0.15974656746239488, 0.027776085211312595, 0.02330072841260748, 0.20156117996836745]) grid = smooth_min(grid, 0,", "#import python libraries import os #OS version: default import numpy", "7], [0.06492287400539203, 0.21223490901058306, 0.36311130408652753, 0.09994467226348329, 0.12833432959710458, 0.1314519110369097]) grid = transit(grid,", "0.08415696601637544, 0.18541009701166816, 0.011062110917544764, 0.017334502896306194, 0.1522407828502505]) grid = prod(grid, 2, [8,", "[1.0]) grid = transit(grid, 8, [9, 10, 2, 15, 13],", "9, -4.261918262131112, 18.680580924548693) grid = smooth_max(grid, 2, 2, 11) grid", "grid[:,:,3] = (x * -0.8484277738516293 + y * -0.5155435342135386) /", "2 grid[:,:,11] = (x * 
0.8435706697714382 + y * 0.7746597063144072)", "13, [6, 15, 11, 9, 12], [0.21908823570589997, 0.1636179110868493, 0.03797238284324163, 0.29532957711092916,", "[10, 0, 2, 4, 8, 5, 6, 7]) grid =", "0.7065409041925205]) grid = sin(grid, 12, 4, -1.6398586072056767, 84.51374680259704) grid =", "= prod(grid, 15, [12, 15]) grid = prod(grid, 8, [11,", "0.10820806749406277, 0.21657812527174225]) grid = transit(grid, 3, [7, 3, 12, 9],", "0.1337935702206657, 0.39097327478734406, 0.08978424496428203]) grid = smooth_min(grid, 9, 9, 10) grid", "4, 3, 2.634465399239887, 62.07538440217337) grid = sin(grid, 7, 2, 3.41043792019894,", "-0.9425245660964123) / 2 grid[:,:,14] = (x * -0.7665883618456049 + y", "hash: ad55481e87ca5a7e9a8e92cd336d1cad # Python version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020,", "grid = magnitude(grid, 1, [7], 2) grid = smooth_min(grid, 7,", "* -0.5986715486203882 + y * 0.9515468928881716) / 2 grid[:,:,6] =", "7], [0.22694849313985146, 0.5162695719847235, 0.25678193487542517]) grid = sin(grid, 9, 9, -4.261918262131112,", "9, 9, -4.261918262131112, 18.680580924548693) grid = smooth_max(grid, 2, 2, 11)", "sin(grid, 12, 13, 3.6938747278005737, 76.37702042567852) grid = magnitude(grid, 15, [5,", "(img + 1) * 127.5 im = Image.fromarray(np.uint8(img)) im.save(os.path.basename(__file__) +", "10, 7, 4]) grid = smooth_min(grid, 7, 12, 0) grid", "far! 
[ %.2f : %.2f ]'%(np.amin(arr), np.amax(arr)) ) return arr", "= sin(grid, 1, 12, -0.5111321725063378, 18.261359970959475) grid = power(grid, 6,", "[1, 4, 0, 6]) grid = transit(grid, 8, [9, 3],", "= sin(grid, 14, 8, -0.4693746108213766, -98.17810769380118) grid = sin(grid, 12,", "grid = transit(grid, 11, [2], [1.0]) #create color space def", "sin(grid, 1, 7, 1.6265187305620117, -97.13150019385894) grid = transit(grid, 11, [0,", "0, [4, 13], 2) grid = transit(grid, 8, [5, 4,", "5], [0.24039798004748805, 0.2886075990223525, 0.18742374307846998, 0.11615833154358073, 0.16741234630810867]) grid = prod(grid, 0,", "grid = transit(grid, 13, [13, 0, 5, 14], [0.09662806703796267, 0.1621478194912538,", "5, 14], [0.09662806703796267, 0.1621478194912538, 0.21548762580464817, 0.5257364876661353]) grid = inverse(grid, 1,", "transit(grid, 8, [9, 10, 2, 15, 13], [0.3265190472987195, 0.21568397721657098, 0.06226802479442838,", "62.07538440217337) grid = sin(grid, 7, 2, 3.41043792019894, 65.36615977552518) grid =", "12, 10, -3.1151555334821888, 17.571856948335267) grid = prod(grid, 6, [2, 4,", "0.25812192912399506, 0.005727171643985687, 0.14633649245899077, 0.033890406689391105, 0.05550396325806974, 0.1242259093715456]) grid = smooth_max(grid, 10,", "10, 15, 14, 9], [0.33493798319460544, 0.14040206011900094, 0.3010385316537353, 0.07412413198773361, 0.14949729304492473]) grid", "transit(grid, 8, [1], [1.0]) grid = sin(grid, 4, 9, 0.2366252211469413,", "-1.115193397983063) grid = smooth_max(grid, 13, 3, 8) grid = transit(grid,", "transit(grid, 2, [7], [1.0]) grid = sin(grid, 10, 9, 6.219381309190064,", "= sin(grid, 1, 2, -1.5301674594368837, -60.29431568717391) grid = transit(grid, 2,", "= magnitude(grid, 6, [14, 5, 13, 11, 2, 9], 2)", "1) / 2) ** (1 + shift) - 1) **", "- (x [:,:,i]+ 1) / 2) ** (1 - shift[i])", "= transit(grid, 4, [3, 13, 9, 8, 5, 2, 12],", "* -0.4262457935185371 + y * -0.6817079327248272) / 2 grid[:,:,11] =", "from PIL import Image #PIL version: 8.1.2 #set initial 
params", "11, 0.5071121900678415, 10.950101187785563) grid = shift(grid, 13, 3, 5.677279514103952) grid", "7, 8) grid = smooth_max(grid, 10, 3, 15) grid =", "14, 2, 2.55681173849493) grid = sin(grid, 10, 14, 0.8649185298731181, 3.1973516320924773)", "= sin(grid, 7, 7, 0.5492744322205282, 35.873568370773654) grid = transit(grid, 7,", "y * 0.2914526739617249) / 2 grid[:,:,2] = (x * 0.9804797761207309", "y * -0.5063344373124843) / 2 grid[:,:,3] = (x * -0.8484277738516293", "0.022696478437764827, 0.2132573540073865, 0.11957266769813353, 0.3209038404419199]) grid = transit(grid, 6, [1, 7,", "0.3209038404419199]) grid = transit(grid, 6, [1, 7, 0, 2, 9,", "1, 13, 12, 0, 8], [0.08486049729383285, 0.15069099224942706, 0.024923245737924458, 0.07191051851248272, 0.25942601829807205,", "0.15569177834549294]) grid = sin(grid, 6, 3, -0.1377650382373763, -96.34412250071645) grid =", "2, 10, 14], [0.20381942291270427, 0.07753380798970702, 0.11445683149439734, 0.08475226158626031, 0.1416941580568898, 0.020968563089492034, 0.0847896752697893,", "11, [12], [1.0]) grid = power(grid, 3, 5, 0.10200689258338674) grid", "grid = transit(grid, 13, [5, 15, 10], [0.13237609957996088, 0.22944646977966682, 0.6381774306403722])", "6, [2, 4, 13]) grid = transit(grid, 5, [1, 9,", "/ 2 grid[:,:,12] = (x * -0.5303146721156469 + y *", "3, 8], [0.03500911832175082, 0.03265868671024263, 0.3248025339288217, 0.4234363710484886, 0.13338109758306646, 0.050712192407629864]) grid =", "PIL import Image #PIL version: 8.1.2 #set initial params SIZE", "(x * -0.7665883618456049 + y * -0.3867357840809138) / 2 grid[:,:,15]", "sin(grid, 9, 8, -0.8743741598911887, 15.92872484723533) grid = transit(grid, 4, [3,", "[10, 11, 4, 15, 0, 6], [0.24973877983541862, 0.3378766591098989, 0.15974656746239488, 0.027776085211312595,", "-37.09230830685477) grid = transit(grid, 9, [8], [1.0]) grid = sin(grid,", "grid = prod(grid, 10, [5, 2]) grid = transit(grid, 15,", "17.571856948335267) grid = prod(grid, 6, [2, 4, 13]) grid =", 
"2, [3, 11, 1]) grid = smooth_min(grid, 3, 2, 7)", "0.5054038923567993, 0.07332571661903758]) grid = transit(grid, 11, [1, 15, 5, 0,", "0.8175204388568255]) grid = transit(grid, 8, [11, 15, 0], [0.08195235243098883, 0.6796005904358621,", "2, 15, -2.5319898824657017, -45.01904701883333) grid = shift(grid, 5, 5, 3.1584260780059252)", "12, [7, 4, 10, 5], [0.5076634403621766, 0.003404332378773421, 0.04142944289977586, 0.4475027843592742]) grid", "sin(grid, 3, 12, -4.078686662791614, 24.459526349523884) grid = inverse(grid, 15, 10)", "10, [10], [1.0]) grid = transit(grid, 1, [8, 10, 15,", "2, -3.329894296119046, -76.41676919069447) grid = smooth_min(grid, 11, 8, 12) grid", "- 0.5) * 2).reshape((SIZE, 1)).repeat(SIZE, 1) grid[:,:,0] = (x *", "Image #PIL version: 8.1.2 #set initial params SIZE = 768", "transit(grid, 4, [3, 13, 9, 8, 5, 2, 12], [0.05731677054419865,", "grid = inverse(grid, 15, 10) grid = shift(grid, 6, 1,", "12, 3]) grid = transit(grid, 13, [5, 15, 10], [0.13237609957996088,", "= (x * -0.7665883618456049 + y * -0.3867357840809138) / 2", "2 grid[:,:,10] = (x * -0.4262457935185371 + y * -0.6817079327248272)", "grid = sin(grid, 9, 7, -2.4657577404884132, 72.95418196004374) grid = transit(grid,", "0.005727171643985687, 0.14633649245899077, 0.033890406689391105, 0.05550396325806974, 0.1242259093715456]) grid = smooth_max(grid, 10, 15,", "UTC # GAS change date: 2021-11-28 09:20:21 UTC # GAS", "3, 2, 7) grid = smooth_max(grid, 8, 10, 6) grid", "0.014801187461296406, 0.3649334871683411, 0.28062233683539095, 0.08637063851194285, 0.06076815802338077, 0.022574848472165728]) grid = transit(grid, 4,", "grid = smooth_min(grid, 7, 4, 13) grid = magnitude(grid, 5,", "[10, 9, 12, 4, 7, 15], 2) grid = sin(grid,", "= prod(grid, 0, [0, 1, 2, 14]) grid = prod(grid,", "grid = smooth_min(grid, 7, 12, 0) grid = transit(grid, 2,", "-0.41048419195488317) / 2 grid[:,:,13] = (x * -0.5864100240508576 + y", "-53.43885199947502) grid = sin(grid, 10, 0, 7.741409383532979, 
-12.082110529508299) grid =", "[1, 14, 8], [0.38986786543390084, 0.40057743619803005, 0.20955469836806906]) grid = transit(grid, 9,", "sin(grid, 9, 7, -2.4657577404884132, 72.95418196004374) grid = transit(grid, 12, [7,", "libraries import os #OS version: default import numpy as np", "[11, 10]) grid = transit(grid, 2, [0, 15, 10], [0.005204838856346087,", "-5.606152225672729, -35.928477282758536) grid = transit(grid, 0, [7, 11, 15, 8,", "= -1, ord = ord) / np.sqrt(len(s_indx)) return test_values(res) def", "0, 3, 15, 4, 2, 11, 13], [0.03597236183123865, 0.04938629068404894, 0.08457069101219464,", "0.050712192407629864]) grid = transit(grid, 7, [14, 2, 13, 1, 11,", "x.copy() res[:,:,t_indx] = np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx]) ** p return test_values(res)", "* 0.9515468928881716) / 2 grid[:,:,6] = (x * 0.2265055481768512 +", "import numpy as np #Numpy version: 1.19.5 from PIL import", "[0.33493798319460544, 0.14040206011900094, 0.3010385316537353, 0.07412413198773361, 0.14949729304492473]) grid = magnitude(grid, 10, [11,", "1) / 2) ** (1 - shift[i]) - 1) **", "8) grid = smooth_max(grid, 10, 3, 15) grid = magnitude(grid,", "= sin(grid, 5, 10, -1.5052434957207308, 24.900059771988836) grid = sin(grid, 8,", "grid = sin(grid, 1, 5, 8.18216846853571, -6.729427492311089) grid = magnitude(grid,", "transit(grid, 15, [13, 3], [0.5897775709748927, 0.41022242902510725]) grid = sin(grid, 12,", "4, 7, 3.7705302330112063, 56.91558505626969) grid = sin(grid, 3, 9, 1.4275963527158242,", "0.03797238284324163, 0.29532957711092916, 0.2839918932530799]) grid = sin(grid, 4, 3, 2.634465399239887, 62.07538440217337)", "prod(grid, 1, [12, 13]) grid = sin(grid, 6, 14, -1.927951619591129,", "/ (1 + shift[i])) + 1) * 2 - 1", "2 grid[:,:,1] = (x * 0.8090860808441245 + y * 0.2914526739617249)", "10, 2, 0.9155140652310594, -34.1653400637653) grid = transit(grid, 8, [14], [1.0])", "smooth_max(grid, 2, 2, 11) grid = sin(grid, 13, 13, 7.718114740496995,", "transit(grid, 3, [11, 1, 12, 9, 
0, 8, 15, 2,", "= shift(grid, 2, 4, 2.1961962516242517) grid = prod(grid, 15, [3,", "5, 0, 1]) grid = sin(grid, 6, 11, -0.7697482296056479, 23.55348445076298)", "[0.1713900685471786, 0.14082681623065177, 0.19859698568682838, 0.4891861295353413]) grid = transit(grid, 13, [12, 15,", "* -0.3391983246964396 + y * -0.5135707069423852) / 2 grid[:,:,9] =", "grid = transit(grid, 11, [0, 9], [0.1290607634325389, 0.8709392365674611]) grid =", "[1.0]) grid = sin(grid, 4, 9, 0.2366252211469413, -40.63773874328931) grid =", "-71.03631884776823) grid = sin(grid, 9, 6, 1.6821417847846682, -64.12547446801875) grid =", "x.copy() res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis = -1, ord = ord)", "= transit(grid, 13, [1, 2, 7, 5, 8, 9, 15],", "= (x * -0.6644350461377522 + y * 0.1739322518414499) / 2", "15], [0.085742434722219, 0.4119764535375412, 0.08377067725345017, 0.13045782410775286, 0.02917564277599849, 0.12489006625007311, 0.13398690135296518]) grid =", "grid = transit(grid, 5, [9, 13, 3, 14], [0.28064413535886806, 0.5181512474389621,", "(1 - shift[i])) * 2 - 1 return test_values(res) res", "grid = prod(grid, 9, [10, 11, 8, 15, 0, 12,", "= sin(grid, 3, 9, 1.4275963527158242, -76.78247379244436) grid = sin(grid, 2,", "= transit(grid, 1, [7, 2, 6, 1, 4, 0], [0.2070905138265326,", "7], 2) grid = sin(grid, 4, 8, 4.28026157040775, -75.14180284322572) grid", "3.501615294498545, -75.50049353340206) grid = prod(grid, 9, [1, 4, 0, 6])", "res = res / 1 res = ((res + 1)", "0.4832751235896067, 0.33341754503307897]) grid = transit(grid, 13, [10, 8, 9, 12,", "grid = sin(grid, 4, 10, -3.680544885171134, 30.633332441673872) grid = transit(grid,", "-6.729427492311089) grid = magnitude(grid, 11, [8, 2], 2) grid =", "5, 6, 7]) grid = transit(grid, 8, [3], [1.0]) grid", "/ (1 + shift)) + 1) * 2 - 1", "10, 8, 5, 2, 3], [0.23701292672659616, 0.08316792464084911, 0.017867439461611043, 0.36417402420248035, 0.02841485585755143,", "grid = smooth_max(grid, 10, 15, 10) grid = transit(grid, 11,", "/ 2 
grid[:,:,8] = (x * -0.3391983246964396 + y *", "11], [0.37033495928182997, 0.6296650407181701]) grid = smooth_min(grid, 4, 1, 8) grid", "grid = magnitude(grid, 14, [4, 6, 1, 0], 2) grid", "2], [0.43102537693091664, 0.25433300797798253, 0.21618454566402304, 0.046743011673522995, 0.05171405775355483]) grid = sin(grid, 10,", "* np.pi * scale + shift) return test_values(res) def magnitude(x,", "= sin(grid, 14, 14, -1.842523240371888, 74.23947694195837) grid = inverse(grid, 7,", "0, 14], [0.36336761526831185, 0.17372789204937897, 0.08036453739500136, 0.09747098994785518, 0.040818441056887325, 0.16796111771248814, 0.07628940657007711]) grid", "0.262107802795733, 0.038001653167336905, 0.2112967596903696, 0.002128256606899112, 0.08103579316557531]) grid = shift(grid, 3, 3,", "0.3357862916746864, 0.2671958928603256]) grid = smooth_min(grid, 1, 1, 11) grid =", "-3.1151555334821888, 17.571856948335267) grid = prod(grid, 6, [2, 4, 13]) grid", "1) grid[:,:,0] = (x * 0.9386329219527516 + y * -0.45147169454413794)", "= transit(grid, 2, [2, 0, 11, 10, 5, 4, 15,", "= np.sin(x[:,:,s_indx] * 0.5 * np.pi * scale + shift)", "0.2682295923869092]) grid = magnitude(grid, 8, [10, 9, 12, 4, 7,", "for j in range(GRID_CHANNELS): x = j % 4 y", "has None elements!') if np.amin(arr) < -1 or np.amax(arr) >", "12, 14, 1.097917736937588, 58.87772371184383) grid = transit(grid, 11, [9, 11],", "= sin(grid, 4, 15, -1.9527829039221054, 20.537776250912316) grid = transit(grid, 7,", "11, 12, 13, 4, 7], [0.23221079251346607, 0.3307147367708056, 0.26199556841553734, 0.018127231672754242, 0.13788777275073352,", "[0.01847979792505241, 0.33442336387003857, 0.15192425697494277, 0.4951725812299663]) grid = sin(grid, 4, 8, 3.386521226555936,", "transit(grid, 4, [14, 11, 12, 13, 4, 7], [0.23221079251346607, 0.3307147367708056,", "y * 0.05113255120007798) / 2 grid[:,:,8] = (x * -0.3391983246964396", "0.2112967596903696, 0.002128256606899112, 0.08103579316557531]) grid = shift(grid, 3, 3, 2.4622222565241207) grid", 
"2, 15, 13], [0.3265190472987195, 0.21568397721657098, 0.06226802479442838, 0.0028158122366541832, 0.39271313845362693]) grid =", "23.55348445076298) grid = sin(grid, 7, 7, 0.5492744322205282, 35.873568370773654) grid =", "0.18541009701166816, 0.011062110917544764, 0.017334502896306194, 0.1522407828502505]) grid = prod(grid, 2, [8, 7,", "x.copy() res[:,:,t_indx] = np.prod(x[:,:,s_indx], -1) return test_values(res) def power(x, t_indx,", "grid = prod(grid, 3, [2, 6, 10, 7, 4]) grid", "5, 7], [0.06492287400539203, 0.21223490901058306, 0.36311130408652753, 0.09994467226348329, 0.12833432959710458, 0.1314519110369097]) grid =", "8, 9, 0, 1], [0.05863158300898051, 0.3467981515651057, 0.262107802795733, 0.038001653167336905, 0.2112967596903696, 0.002128256606899112,", "[1.0]) grid = sin(grid, 3, 12, -4.078686662791614, 24.459526349523884) grid =", "0.06439062651950353, 0.03284446726347166, 0.04732779189481446, 0.13963294227934445]) grid = smooth_min(grid, 0, 13, 15)", "15) grid = transit(grid, 1, [12, 8, 10, 4, 2],", "1.4677093499726706, 1.2451388311186942]) res = res / 1 res = ((res", "11, 15, 8, 12, 0, 4, 14, 3, 5], [0.11084510086381213,", "3, [6, 14, 0, 3, 15, 4, 2, 11, 13],", "initial grid grid = np.zeros((SIZE, SIZE, GRID_CHANNELS)) x = ((np.arange(SIZE)/(SIZE-1)", "2, 3, 15, 5, 7], [0.06492287400539203, 0.21223490901058306, 0.36311130408652753, 0.09994467226348329, 0.12833432959710458,", "0.21568397721657098, 0.06226802479442838, 0.0028158122366541832, 0.39271313845362693]) grid = magnitude(grid, 11, [13, 10,", "8], 2) grid = transit(grid, 8, [3, 15, 9, 6,", "= sin(grid, 12, 14, 1.097917736937588, 58.87772371184383) grid = transit(grid, 11,", "= -np.log((np.exp(-x[:,:,s1_indx] * p) + np.exp(-x[:,:,s2_indx] * p)) ** (1/p))", "def prod(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx] = np.prod(x[:,:,s_indx],", "[4], 2) grid = sin(grid, 1, 5, 8.18216846853571, -6.729427492311089) grid", "= shift(grid, 3, 3, 2.4622222565241207) grid = sin(grid, 10, 0,", "/ 2 
grid[:,:,11] = (x * 0.8435706697714382 + y *", "0.2526657169525107, 0.08415696601637544, 0.18541009701166816, 0.011062110917544764, 0.017334502896306194, 0.1522407828502505]) grid = prod(grid, 2,", "0.17667886361751853, 0.15211061797378253, 0.016462544099609754, 0.0072484377164178625, 0.4477791048998878, 0.11849249751317383]) grid = transit(grid, 10,", "sin(grid, 7, 2, 3.41043792019894, 65.36615977552518) grid = transit(grid, 0, [14,", "0, 14], [0.29712982335534416, 0.2526657169525107, 0.08415696601637544, 0.18541009701166816, 0.011062110917544764, 0.017334502896306194, 0.1522407828502505]) grid", "0.3454992433435407, 0.15226309381221942]) grid = magnitude(grid, 10, [7, 15, 5], 2)", "= transit(grid, 2, [10, 11, 4, 15, 0, 6], [0.24973877983541862,", "14, -1.927951619591129, -65.3028706482776) grid = prod(grid, 14, [13]) grid =", "11, 8, 4.303514875116891, -67.11152580467314) grid = prod(grid, 5, [3, 9,", "3.41043792019894, 65.36615977552518) grid = transit(grid, 0, [14, 3, 11, 10,", "prod(grid, 6, [2, 4, 13]) grid = transit(grid, 5, [1,", "= magnitude(grid, 14, [4], 2) grid = sin(grid, 1, 5,", "= sin(grid, 4, 4, 3.47544933993972, -37.11795195118333) grid = sin(grid, 11,", "1): res = x.copy() res[:,:,t_indx] = np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx]) **", "0.38648847983305307, 0.3417883406507845]) grid = transit(grid, 15, [7, 3], [0.9172074355564371, 0.08279256444356292])", "= sin(grid, 2, 15, -2.5319898824657017, -45.01904701883333) grid = shift(grid, 5,", "range(x.shape[-1]): if shift[i] > 0: res[:,:,i] = (-np.abs(((x[:,:,i] + 1)", "-1.8565532127479274, -54.75186223635349) grid = transit(grid, 10, [14], [1.0]) grid =", "1.07 return test_values(res) def smooth_min(x, t_indx, s1_indx, s2_indx, p =", "0.36311130408652753, 0.09994467226348329, 0.12833432959710458, 0.1314519110369097]) grid = transit(grid, 8, [6, 2],", "0.16741234630810867]) grid = prod(grid, 0, [0, 1, 2, 14]) grid", "2, -1.5301674594368837, -60.29431568717391) grid = transit(grid, 2, [13, 11, 
5],", "5, [3, 9, 2]) grid = sin(grid, 5, 1, 2.0751861425380627,", "[4, 10, 1, 13, 5, 0, 7, 8, 9, 12,", "transit(grid, 14, [14, 13, 15], [0.530662002197574, 0.1082014600047566, 0.36113653779766947]) grid =", "+ y * -0.7671554143072785) / 2 #apply transformations to the", "55.242200715207815) grid = sin(grid, 12, 10, -3.1151555334821888, 17.571856948335267) grid =", "0.5) * 2).reshape((1, SIZE)).repeat(SIZE, 0) y = ((np.arange(SIZE)/(SIZE-1) - 0.5)", "0, 13, 15) grid = smooth_max(grid, 5, 8, 4) grid", "/ 2) ** (1 + shift) - 1) ** (1", "= sin(grid, 1, 11, 0.5071121900678415, 10.950101187785563) grid = shift(grid, 13,", "1.439019575760617, 13.126437741104823) grid = transit(grid, 10, [15, 8, 13, 2],", "p) + np.exp(x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return", "6, 12, 2, 7, 4], [0.03047869593495055, 0.024092687676923453, 0.02665655056773558, 0.17667886361751853, 0.15211061797378253,", "5, -5.606152225672729, -35.928477282758536) grid = transit(grid, 0, [7, 11, 15,", "sin(grid, 11, 13, -6.909579361872105, 70.84834564082374) grid = transit(grid, 2, [11,", "9, 3, 10, 4], [0.24075568684771534, 0.02527375632067568, 0.4828116495090197, 0.09546712897709621, 0.15569177834549294]) grid", "[9]) grid = sin(grid, 4, 3, 0.10154488887533689, 12.479110491961137) grid =", "4, 15, 13], [0.1869735689344564, 0.06343641920215143, 0.038951322931441136, 0.04613309733662021, 0.19750663742298355, 0.16072124228620793, 0.15869932715876592,", "prod(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx] = np.prod(x[:,:,s_indx], -1)", "smooth_max(x, t_indx, s1_indx, s2_indx, p = 10): res = x.copy()", "res = ((res + 1) / 2 * 255).clip(0,255) #save", "* -0.8484277738516293 + y * -0.5155435342135386) / 2 grid[:,:,4] =", "- (x[:,:,s_indx] + 1) / 2) ** (1 - shift)", "2 #apply transformations to the grid grid = transit(grid, 4,", "= sin(grid, 11, 6, 1.576100090732909, -21.508000199215132) grid = shift(grid, 11,", "[11, 4, 15, 10, 8, 5, 2, 3], [0.23701292672659616, 0.08316792464084911,", "* 
-0.5063344373124843) / 2 grid[:,:,3] = (x * -0.8484277738516293 +", "0.002128256606899112, 0.08103579316557531]) grid = shift(grid, 3, 3, 2.4622222565241207) grid =", "res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx] + 1) / 2) ** (1 +", "UTC # GAS md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad # Python version: 3.7.9", "res = x.copy() res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis = -1, ord", "shift) - 1) ** (1 / (1 + shift)) +", "= power(grid, 0, 12, 2.5526439221510495) grid = sin(grid, 4, 10,", "transit(grid, 1, [4, 14, 0, 13], [0.2785496566747933, 0.004915230889640017, 0.30146401859790545, 0.4150710938376613])", "grid = sin(grid, 9, 10, -1.8565532127479274, -54.75186223635349) grid = transit(grid,", "= (x * -0.4262457935185371 + y * -0.6817079327248272) / 2", "2 - 1 if shift < 0: res[:,:,t_indx] = np.abs((1", "= sin(grid, 3, 12, 6.470760426148978, -53.62090724330151) grid = sin(grid, 10,", "= sin(grid, 10, 2, 0.9155140652310594, -34.1653400637653) grid = transit(grid, 8,", "/ 2 grid[:,:,5] = (x * -0.5986715486203882 + y *", "GRID_CHANNELS = 16 def test_values(arr): if np.isnan(arr).any(): raise Exception('Array has", "/ 1 res = ((res + 1) / 2 *", "10, [1], [1.0]) grid = transit(grid, 15, [15], [1.0]) grid", "transit(grid, 13, [6, 15, 11, 9, 12], [0.21908823570589997, 0.1636179110868493, 0.03797238284324163,", "3, 10, 4], [0.24075568684771534, 0.02527375632067568, 0.4828116495090197, 0.09546712897709621, 0.15569177834549294]) grid =", "3, [2, 6, 10, 7, 4]) grid = smooth_min(grid, 7,", "[12, 6, 4, 9, 1, 0, 14], [0.36336761526831185, 0.17372789204937897, 0.08036453739500136,", "13], 2) grid = transit(grid, 8, [5, 4, 15, 6,", "1, 2, 14]) grid = prod(grid, 9, [10, 11, 8,", "14, -1.842523240371888, 74.23947694195837) grid = inverse(grid, 7, 8) grid =", "-6.496603906160505, -73.75617586359363) grid = transit(grid, 6, [6, 14], [0.7201753385758813, 0.2798246614241187])", "img = np.zeros((SIZE * 4, SIZE * 4)) for j", "* p)) ** (1/p)) / 1.07 return test_values(res) def 
prod(x,", "= ((res + 1) / 2 * 255).clip(0,255) #save results", "0, [5, 0], 2) grid = transit(grid, 6, [15, 8],", "0.10200689258338674) grid = transit(grid, 2, [10, 11, 4, 15, 0,", "3, 0.10154488887533689, 12.479110491961137) grid = magnitude(grid, 1, [7], 2) grid", "ord) / np.sqrt(len(s_indx)) return test_values(res) def shift(x, t_indx, s_indx, shift):", "* 0.9804797761207309 + y * -0.5063344373124843) / 2 grid[:,:,3] =", "-35.928477282758536) grid = transit(grid, 0, [7, 11, 15, 8, 12,", "+= shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669, 1.4677093499726706, 1.2451388311186942]) res = res /", "grid = sin(grid, 7, 15, -4.9164570678736865, 86.15931416043557) grid = sin(grid,", "13, 7], [0.16813621041531998, 0.42150135317124293, 0.410362436413437]) grid = inverse(grid, 6, 6)", "0.005474465860804227, 0.12962618248625338, 0.03090992138168193, 0.016043163973997736, 0.13259375374543056, 0.09920705802758992, 0.1415090600653345, 0.09597789664069131, 0.06106766497801195, 0.14032187015082653,", "[1.0]) grid = magnitude(grid, 0, [4, 13], 2) grid =", "= smooth_max(grid, 10, 5, 13) grid = sin(grid, 9, 10,", "0.21223490901058306, 0.36311130408652753, 0.09994467226348329, 0.12833432959710458, 0.1314519110369097]) grid = transit(grid, 8, [6,", "1, [7], 2) grid = smooth_min(grid, 7, 4, 13) grid", "3, 1.6405444007982959, -37.09230830685477) grid = transit(grid, 9, [8], [1.0]) grid", "grid = prod(grid, 15, [12, 15]) grid = prod(grid, 8,", "[8], [1.0]) grid = sin(grid, 5, 10, -1.5052434957207308, 24.900059771988836) grid", "0.13288430351514774, 0.10820806749406277, 0.21657812527174225]) grid = transit(grid, 3, [7, 3, 12,", "= sin(grid, 4, 5, -1.8457292172108153, -53.43885199947502) grid = sin(grid, 10,", "11, [2], [1.0]) #create color space def shift_colors(x, shift): res", "10], [0.6926745567135898, 0.1831142410590532, 0.12421120222735695]) grid = magnitude(grid, 7, [6, 12,", "- shift) - 1) ** (1 / (1 - shift))", "2 grid[:,:,4] = (x * -0.6644350461377522 + y * 
0.1739322518414499)", "sin(grid, 12, 10, -3.1151555334821888, 17.571856948335267) grid = prod(grid, 6, [2,", "res / 1 res = ((res + 1) / 2", "4, 3.0281102269529683, 11.185401112275173) grid = sin(grid, 10, 4, 1.2844464834351186, -45.836492724169695)", "[12, 14, 4], 2) grid = shift(grid, 3, 9, 3.0393348894939773)", "37.95950546335726) grid = sin(grid, 12, 13, 3.6938747278005737, 76.37702042567852) grid =", "30.75954926767548) grid = inverse(grid, 8, 7) grid = prod(grid, 10,", "inverse(grid, 4, 5) grid = transit(grid, 1, [4, 14, 0,", "magnitude(grid, 0, [5, 0], 2) grid = transit(grid, 6, [15,", "grid = sin(grid, 11, 8, 4.303514875116891, -67.11152580467314) grid = prod(grid,", "14, 4], 2) grid = shift(grid, 3, 9, 3.0393348894939773) grid", "grid = transit(grid, 0, [4, 3, 8], [0.23275058190778222, 0.49901982570530873, 0.2682295923869092])", "13, 2, 4.295107938126156, 57.378601701270014) grid = sin(grid, 10, 2, -0.010214061334835559,", "grid = transit(grid, 6, [8, 6, 5, 7, 4, 2],", "grid = magnitude(grid, 11, [13, 10, 12, 2, 11, 14],", "0.08475226158626031, 0.1416941580568898, 0.020968563089492034, 0.0847896752697893, 0.0921589665387646, 0.008240731277180186, 0.17158558178481512]) grid = transit(grid,", "0, 8, 15, 2, 10, 14], [0.20381942291270427, 0.07753380798970702, 0.11445683149439734, 0.08475226158626031,", "(AMD64)] # For more information visit: https://github.com/volotat/GAS #import python libraries", "inverse(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx] = -x[:,:,s_indx] return", "1, 5, 0.6814927249849106, 30.75954926767548) grid = inverse(grid, 8, 7) grid", "shift(grid, 9, 13, -5.367438086043798) grid = magnitude(grid, 13, [2, 0],", "[1.0]) grid = sin(grid, 10, 9, 6.219381309190064, -71.03631884776823) grid =", "grid = magnitude(grid, 8, [10, 9, 12, 4, 7, 15],", "0.2728285912351676, 0.036877533709020166]) grid = transit(grid, 7, [11], [1.0]) grid =", "= shift(grid, 8, 9, 2.766857264282361) grid = transit(grid, 3, [6,", "= smooth_max(grid, 1, 15, 12) grid = 
prod(grid, 11, [3])", "shift(grid, 14, 2, 2.55681173849493) grid = sin(grid, 10, 14, 0.8649185298731181,", "sin(grid, 6, 14, -1.927951619591129, -65.3028706482776) grid = prod(grid, 14, [13])", "12.479110491961137) grid = magnitude(grid, 1, [7], 2) grid = smooth_min(grid,", "[3, 15, 9, 6, 11], [0.036102265915692405, 0.1224495166624379, 0.2384660328868578, 0.3357862916746864, 0.2671958928603256])", "0.36417402420248035, 0.02841485585755143, 0.19916101840344472, 0.03422984110049058, 0.03597196960697647]) grid = magnitude(grid, 13, [11,", "-0.5135707069423852) / 2 grid[:,:,9] = (x * -0.4075423366723827 + y", "smooth_max(grid, 10, 15, 10) grid = transit(grid, 11, [9, 0,", "2 grid[:,:,3] = (x * -0.8484277738516293 + y * -0.5155435342135386)", "[0.2662646690994658, 0.2460545507972383, 0.4876807801032959]) grid = transit(grid, 2, [7], [1.0]) grid", "+ 1) / 2) ** (1 + shift) - 1)", "2 - 1 if shift[i] < 0: res[:,:,i] = np.abs((1", "def magnitude(x, t_indx, s_indx, ord = 2): res = x.copy()", "np.sin(x[:,:,s_indx] * 0.5 * np.pi * scale + shift) return", "13], [0.381505247910628, 0.12073241493361198, 0.3454992433435407, 0.15226309381221942]) grid = magnitude(grid, 10, [7,", "grid = transit(grid, 0, [12, 6, 4, 9, 1, 0,", "4, 15, 0, 6], [0.24973877983541862, 0.3378766591098989, 0.15974656746239488, 0.027776085211312595, 0.02330072841260748, 0.20156117996836745])", "5, 9, 4, 6, 12], [0.18067242214638962, 0.12939497982917472, 0.08164480089591167, 0.24583958083442445, 0.2244518823086713,", "[0.08486049729383285, 0.15069099224942706, 0.024923245737924458, 0.07191051851248272, 0.25942601829807205, 0.16834508849259286, 0.14540219911263502, 0.094441440303033]) grid =", "grid = transit(grid, 13, [6, 2, 3, 15, 5, 7],", "10, 7], [0.5203714128788618, 0.068511863728177, 0.10141059844877331, 0.2728285912351676, 0.036877533709020166]) grid = transit(grid,", "# For more information visit: https://github.com/volotat/GAS #import python libraries import", "= np.abs((1 - (x[:,:,s_indx] + 1) / 2) 
** (1", "grid = transit(grid, 1, [14], [1.0]) grid = transit(grid, 8,", "0.11516125867317799, 0.12240760599022518, 0.05066197369764289, 0.13869178538077429, 0.09948828746526778, 0.16686217850764798, 0.09926467338066268]) grid = transit(grid,", "sin(grid, 10, 0, 7.741409383532979, -12.082110529508299) grid = prod(grid, 11, [9])", "grid = transit(grid, 9, [8], [1.0]) grid = sin(grid, 5,", "magnitude(grid, 14, [4, 6, 1, 0], 2) grid = transit(grid,", "0, 15], 2) grid = prod(grid, 2, [3, 11, 1])", "[0.2785496566747933, 0.004915230889640017, 0.30146401859790545, 0.4150710938376613]) grid = sin(grid, 3, 11, -6.496603906160505,", "5], [0.18796556626817826, 0.19260744772691155, 0.11226112831146452, 0.08161640805634696, 0.08706050582840198, 0.2243337708440404, 0.11415517296465624]) grid =", "raise Exception('Array has None elements!') if np.amin(arr) < -1 or", "8, 10, 6) grid = prod(grid, 3, [2, 6, 10,", "2, 3, 9, 5], [0.24039798004748805, 0.2886075990223525, 0.18742374307846998, 0.11615833154358073, 0.16741234630810867]) grid", "8, 9, 12, 6, 11, 14], [0.10006330804326793, 0.03891760159161208, 0.005474465860804227, 0.12962618248625338,", "transit(grid, 2, [2, 0, 11, 10, 5, 4, 15, 13],", "8, 11, 15) grid = sin(grid, 12, 6, -3.621533174445339, 24.02414911462421)", "grid = transit(grid, 11, [12, 6, 9], [0.1597221050818672, 0.523275926379751, 0.31700196853838186])", "y = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((SIZE, 1)).repeat(SIZE, 1) grid[:,:,0]", "[7, 2, 3, 9, 5], [0.24039798004748805, 0.2886075990223525, 0.18742374307846998, 0.11615833154358073, 0.16741234630810867])", "[0.36336761526831185, 0.17372789204937897, 0.08036453739500136, 0.09747098994785518, 0.040818441056887325, 0.16796111771248814, 0.07628940657007711]) grid = transit(grid,", "4, [10, 0, 2, 4, 8, 5, 6, 7]) grid", "1, 0, 14], [0.36336761526831185, 0.17372789204937897, 0.08036453739500136, 0.09747098994785518, 0.040818441056887325, 0.16796111771248814, 0.07628940657007711])", "** (1 - shift[i]) - 1) ** (1 
/ (1", "13], [0.3265190472987195, 0.21568397721657098, 0.06226802479442838, 0.0028158122366541832, 0.39271313845362693]) grid = magnitude(grid, 11,", "return test_values(res) def magnitude(x, t_indx, s_indx, ord = 2): res", "8, [10, 9, 12, 4, 7, 15], 2) grid =", "return test_values(res) def shift(x, t_indx, s_indx, shift): res = x.copy()", "2 grid[:,:,2] = (x * 0.9804797761207309 + y * -0.5063344373124843)", "14, 0, 3, 11], [0.13835365002720226, 0.008781149737259792, 0.24627334258742545, 0.04870190081124998, 0.049950480577274, 0.15123046752435387,", "2 grid[:,:,6] = (x * 0.2265055481768512 + y * 0.4365452266748293)", "= prod(grid, 6, [2, 4, 13]) grid = transit(grid, 5,", "grid = transit(grid, 10, [14], [1.0]) grid = transit(grid, 15,", "* p) + np.exp(-x[:,:,s2_indx] * p)) ** (1/p)) / 1.07", "/ 2 grid[:,:,9] = (x * -0.4075423366723827 + y *", "6, 12, 8, 9, 0, 1], [0.05863158300898051, 0.3467981515651057, 0.262107802795733, 0.038001653167336905,", "= -1) return test_values(res.clip(-1,1)) def sin(x, t_indx, s_indx, scale =", "* 0.2265055481768512 + y * 0.4365452266748293) / 2 grid[:,:,7] =", "7, [6, 12, 7, 13, 8], 2) grid = transit(grid,", "-75.14180284322572) grid = prod(grid, 3, [14, 15]) grid = inverse(grid,", "0.03597196960697647]) grid = magnitude(grid, 13, [11, 7], 2) grid =", "sin(grid, 9, 10, -1.8565532127479274, -54.75186223635349) grid = transit(grid, 10, [14],", "grid = transit(grid, 7, [12, 11, 13, 4], [0.1713900685471786, 0.14082681623065177,", "10, 10, 0.7827958631857042, -90.82177259964699) grid = transit(grid, 6, [8, 6,", "grid = shift(grid, 3, 3, 2.4622222565241207) grid = sin(grid, 10,", "* -0.6644350461377522 + y * 0.1739322518414499) / 2 grid[:,:,5] =", "= magnitude(grid, 13, [11, 7], 2) grid = sin(grid, 4,", "0.07332571661903758]) grid = transit(grid, 11, [1, 15, 5, 0, 6,", "= inverse(grid, 11, 5) grid = magnitude(grid, 14, [4, 6,", "= shift(grid, 13, 3, 5.677279514103952) grid = transit(grid, 3, [15,", "import os #OS version: default import 
numpy as np #Numpy", "visit: https://github.com/volotat/GAS #import python libraries import os #OS version: default", "[0.37033495928182997, 0.6296650407181701]) grid = smooth_min(grid, 4, 1, 8) grid =", "magnitude(grid, 13, [11, 7], 2) grid = sin(grid, 4, 8,", "9, 10], [0.10267794314653868, 0.019022820046952493, 0.061606568183823145, 0.4832751235896067, 0.33341754503307897]) grid = transit(grid,", "[3, 13, 9, 8, 5, 2, 12], [0.05731677054419865, 0.08527765171582982, 0.33929504571762287,", "grid = magnitude(grid, 7, [6, 12, 7, 13, 8], 2)", "14, 0.8649185298731181, 3.1973516320924773) grid = sin(grid, 9, 7, -2.4657577404884132, 72.95418196004374)", "[8, 7, 11, 10, 15, 0, 5]) grid = transit(grid,", "10], [0.10267794314653868, 0.019022820046952493, 0.061606568183823145, 0.4832751235896067, 0.33341754503307897]) grid = transit(grid, 13,", "0.4234363710484886, 0.13338109758306646, 0.050712192407629864]) grid = transit(grid, 7, [14, 2, 13,", "-73.75617586359363) grid = transit(grid, 6, [6, 14], [0.7201753385758813, 0.2798246614241187]) grid", "as np #Numpy version: 1.19.5 from PIL import Image #PIL", "-5.367438086043798) grid = magnitude(grid, 13, [2, 0], 2) grid =", "[6, 2], [0.6857167761482571, 0.31428322385174284]) grid = shift(grid, 6, 15, 4.115946851379848)", "9, 6, 1.6821417847846682, -64.12547446801875) grid = sin(grid, 13, 3, -0.15800274281797377,", "1, [3], [1.0]) grid = magnitude(grid, 14, [4], 2) grid", "#define grid transformation methods def transit(x, t_indx, s_indx, alphas): res", "transit(grid, 12, [8, 11, 3], [0.2717231795161624, 0.38648847983305307, 0.3417883406507845]) grid =", "= transit(grid, 8, [11, 15, 0], [0.08195235243098883, 0.6796005904358621, 0.23844705713314918]) grid", "-1.5052434957207308, 24.900059771988836) grid = sin(grid, 8, 10, 2.5947698108630664, -90.74050288622541) grid", "0.09926467338066268]) grid = transit(grid, 6, [6, 13, 7], [0.16813621041531998, 0.42150135317124293,", "13, 15], [0.530662002197574, 0.1082014600047566, 
0.36113653779766947]) grid = transit(grid, 14, [10,", "inverse(grid, 6, 6) grid = sin(grid, 7, 15, -4.9164570678736865, 86.15931416043557)", "prod(grid, 9, [10, 11, 8, 15, 0, 12, 3]) grid", "y * -0.45147169454413794) / 2 grid[:,:,1] = (x * 0.8090860808441245", "= sin(grid, 13, 3, -0.15800274281797377, 90.63950889076133) grid = sin(grid, 14,", "8, 5) grid = smooth_max(grid, 10, 5, 13) grid =", "10, 0, 0.5112825397666086, 37.95950546335726) grid = sin(grid, 12, 13, 3.6938747278005737,", "[1, 12, 15, 13, 3], [0.32356965941479515, 0.022696478437764827, 0.2132573540073865, 0.11957266769813353, 0.3209038404419199])", "grid = power(grid, 6, 5, 0.9223892145169746) grid = transit(grid, 2,", "layers img = np.zeros((SIZE * 4, SIZE * 4)) for", "10], [0.005204838856346087, 0.5116602651328436, 0.48313489601081044]) grid = transit(grid, 10, [10], [1.0])", "-47.618914508652054) grid = shift(grid, 9, 8, -1.1449289879251126) grid = transit(grid,", "6) grid = sin(grid, 7, 15, -4.9164570678736865, 86.15931416043557) grid =", "def test_values(arr): if np.isnan(arr).any(): raise Exception('Array has None elements!') if", "= x.copy() if shift > 0: res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx] +", "9], [0.13643904772292245, 0.38438336340747, 0.15936221296996333, 0.31981537589964426]) grid = sin(grid, 10, 3,", "1, 4, 0], [0.2070905138265326, 0.06562120796792839, 0.17355051228662716, 0.05514926535269553, 0.0829726599151083, 0.41561584065110807]) grid", "transit(grid, 6, [15, 8], [0.5303803951305812, 0.4696196048694189]) grid = inverse(grid, 0,", "transit(grid, 11, [9, 0, 11, 7, 3, 8], [0.03500911832175082, 0.03265868671024263,", "0.39271313845362693]) grid = magnitude(grid, 11, [13, 10, 12, 2, 11,", "9], [0.1597221050818672, 0.523275926379751, 0.31700196853838186]) grid = sin(grid, 14, 7, 5.409920766787869,", "11, 14], 2) grid = transit(grid, 12, [8, 11, 3],", "sin(grid, 12, 6, -3.621533174445339, 24.02414911462421) grid = sin(grid, 1, 11,", "= sin(grid, 7, 2, 3.41043792019894, 
65.36615977552518) grid = transit(grid, 0,", "sin(grid, 9, 6, 1.6821417847846682, -64.12547446801875) grid = sin(grid, 13, 3,", "11], [0.01847979792505241, 0.33442336387003857, 0.15192425697494277, 0.4951725812299663]) grid = sin(grid, 4, 8,", "15, 0, 12, 3]) grid = transit(grid, 13, [5, 15,", "transit(grid, 6, [9, 11, 2, 13], [0.381505247910628, 0.12073241493361198, 0.3454992433435407, 0.15226309381221942])", "12) grid = prod(grid, 11, [3]) grid = smooth_max(grid, 8,", "transit(grid, 15, [15], [1.0]) grid = prod(grid, 13, [6, 3,", "test_values(res) res = np.zeros((SIZE, SIZE, 3)) res += shift_colors(grid[:,:,0:1].repeat(3, -1),", "[15, 5, 9, 4, 6, 12], [0.18067242214638962, 0.12939497982917472, 0.08164480089591167, 0.24583958083442445,", "= magnitude(grid, 8, [10, 9, 12, 4, 7, 15], 2)", "0.14949729304492473]) grid = magnitude(grid, 10, [11, 0, 5], 2) grid", "* 2 - 1 if shift < 0: res[:,:,t_indx] =", "0, 5]) grid = transit(grid, 11, [7, 2, 3, 9,", "15], 2) grid = prod(grid, 2, [3, 11, 1]) grid", "1, -1.115193397983063) grid = smooth_max(grid, 13, 3, 8) grid =", "+ y * -0.45147169454413794) / 2 grid[:,:,1] = (x *", "[3], [1.0]) grid = magnitude(grid, 14, [4], 2) grid =", "[14, 2, 13, 1, 11, 3, 8, 7], [0.207462236904601, 0.11516125867317799,", "= (x * 0.9804797761207309 + y * -0.5063344373124843) / 2", "transit(grid, 7, [11], [1.0]) grid = transit(grid, 5, [9, 13,", "0.3649334871683411, 0.28062233683539095, 0.08637063851194285, 0.06076815802338077, 0.022574848472165728]) grid = transit(grid, 4, [11,", "15, 10) grid = transit(grid, 11, [9, 0, 11, 7,", "= magnitude(grid, 13, [7, 4, 15], 2) grid = transit(grid,", "4, 15], 2) grid = transit(grid, 13, [6, 15, 11,", "0.11525379082942891, 0.11556721863292163, 0.12372657123165616, 0.1356897031789931, 0.20047556686480725, 0.09921434949484752, 0.05399039482501285]) grid = transit(grid,", "15, 7, 1], [0.20378471182464508, 0.038241020379710625, 0.16903312106740406, 0.3387613981701764, 0.11303295854369695, 0.13714679001436697]) 
grid", "= sin(grid, 4, 9, 0.2366252211469413, -40.63773874328931) grid = sin(grid, 9,", "grid = sin(grid, 9, 15, -2.507870105026106, -89.43842740853354) grid = transit(grid,", "1 if shift[i] < 0: res[:,:,i] = np.abs((1 - (x", "0.19260744772691155, 0.11226112831146452, 0.08161640805634696, 0.08706050582840198, 0.2243337708440404, 0.11415517296465624]) grid = sin(grid, 11,", "= transit(grid, 6, [6, 13, 7], [0.16813621041531998, 0.42150135317124293, 0.410362436413437]) grid", "9, [15, 3, 11, 0, 14], 2) grid = sin(grid,", "[6, 12, 7, 13, 8], 2) grid = transit(grid, 8,", "1, 5], [0.18796556626817826, 0.19260744772691155, 0.11226112831146452, 0.08161640805634696, 0.08706050582840198, 0.2243337708440404, 0.11415517296465624]) grid", "15], [0.036901331671075975, 0.5054281720479712, 0.13288430351514774, 0.10820806749406277, 0.21657812527174225]) grid = transit(grid, 3,", "8], [0.5303803951305812, 0.4696196048694189]) grid = inverse(grid, 0, 0) grid =", "2.634465399239887, 62.07538440217337) grid = sin(grid, 7, 2, 3.41043792019894, 65.36615977552518) grid", "grid = sin(grid, 1, 12, -0.5111321725063378, 18.261359970959475) grid = power(grid,", "= sin(grid, 0, 3, -3.561651028660104, 11.539889679902203) grid = power(grid, 10,", "10, 4, 1.2844464834351186, -45.836492724169695) grid = sin(grid, 1, 2, -1.5301674594368837,", "magnitude(x, t_indx, s_indx, ord = 2): res = x.copy() res[:,:,t_indx]", "smooth_min(grid, 0, 13, 15) grid = smooth_max(grid, 5, 8, 4)", "* 0.7746597063144072) / 2 grid[:,:,12] = (x * -0.5303146721156469 +", "0.06106766497801195, 0.14032187015082653, 0.008288053054498123]) grid = prod(grid, 15, [12, 15]) grid", "grid = transit(grid, 6, [9, 11, 2, 13], [0.381505247910628, 0.12073241493361198,", "12, 2], [0.031587088727564654, 0.024264739611302585, 0.0306940545567164, 0.19611241111174804, 0.7173417059926683]) grid = transit(grid,", "13, 3, 5.677279514103952) grid = transit(grid, 3, [15, 11, 2,", "< 0: res[:,:,t_indx] = np.abs((1 - (x[:,:,s_indx] + 1) /", "9, [5], 
[1.0]) grid = transit(grid, 15, [12, 0, 1,", "0.049950480577274, 0.15123046752435387, 0.31255198044446264, 0.04415702829077187]) grid = transit(grid, 1, [3], [1.0])", "grid = magnitude(grid, 11, [8, 2], 2) grid = transit(grid,", "-30.256455817944243) grid = sin(grid, 8, 2, 3.501615294498545, -75.50049353340206) grid =", "4.115946851379848) grid = transit(grid, 15, [13, 3], [0.5897775709748927, 0.41022242902510725]) grid", "15, 8, 12, 0, 4, 14, 3, 5], [0.11084510086381213, 0.003439701966452383,", "np.zeros((SIZE, SIZE, 3)) res += shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669, 1.4677093499726706, 1.2451388311186942])", "-65.3028706482776) grid = prod(grid, 14, [13]) grid = sin(grid, 1,", "sin(grid, 10, 0, 0.5112825397666086, 37.95950546335726) grid = sin(grid, 12, 13,", "24.459526349523884) grid = inverse(grid, 15, 10) grid = shift(grid, 6,", "3, 5.677279514103952) grid = transit(grid, 3, [15, 11, 2, 8,", "9, [8], [1.0]) grid = sin(grid, 5, 10, -1.5052434957207308, 24.900059771988836)", "6, [14, 5, 13, 11, 2, 9], 2) grid =", "= shift(grid, 14, 2, 2.55681173849493) grid = sin(grid, 10, 14,", "0.30211530092641004]) grid = sin(grid, 5, 2, -2.2972705471452146, -12.522748365129786) grid =", "[0.20378471182464508, 0.038241020379710625, 0.16903312106740406, 0.3387613981701764, 0.11303295854369695, 0.13714679001436697]) grid = transit(grid, 4,", "y * 0.4365452266748293) / 2 grid[:,:,7] = (x * 0.5049774961793401", "= magnitude(grid, 14, [4, 6, 1, 0], 2) grid =", "grid = transit(grid, 4, [1, 12, 15, 13, 3], [0.32356965941479515,", "15, [3, 5, 0, 1]) grid = sin(grid, 6, 11,", "[0.3199750359220948, 0.07376266150860299, 0.03622483092076182, 0.09070212266434277, 0.4030414045204916, 0.07629394446370606]) grid = magnitude(grid, 13,", "0.11226112831146452, 0.08161640805634696, 0.08706050582840198, 0.2243337708440404, 0.11415517296465624]) grid = sin(grid, 11, 13,", "1, [7, 2, 6, 1, 4, 0], [0.2070905138265326, 0.06562120796792839, 0.17355051228662716,", "5, 1) grid = 
magnitude(grid, 0, [5, 0], 2) grid", "grid = sin(grid, 15, 0, -0.033265790773207085, 51.94880270063618) grid = smooth_min(grid,", "13, [2, 0], 2) grid = transit(grid, 13, [6, 2,", "0.49037959172682255 + y * -0.7671554143072785) / 2 #apply transformations to", "4) grid = transit(grid, 10, [1], [1.0]) grid = transit(grid,", "14, 0, 3, 15, 4, 2, 11, 13], [0.03597236183123865, 0.04938629068404894,", "0.024923245737924458, 0.07191051851248272, 0.25942601829807205, 0.16834508849259286, 0.14540219911263502, 0.094441440303033]) grid = transit(grid, 11,", "sin(grid, 10, 10, 0.9558311639914843, -47.618914508652054) grid = shift(grid, 9, 8,", "smooth_min(grid, 11, 8, 12) grid = transit(grid, 1, [1, 14,", "11, [3]) grid = smooth_max(grid, 8, 11, 15) grid =", "-0.5864100240508576 + y * -0.9425245660964123) / 2 grid[:,:,14] = (x", "grid = prod(grid, 15, [3, 5, 0, 1]) grid =", "grid = transit(grid, 2, [1, 2], [0.9078557995211777, 0.09214420047882232]) grid =", "- 1) ** (1 / (1 - shift[i])) * 2", "1: raise Exception('Values went to far! 
[ %.2f : %.2f", "0.016043163973997736, 0.13259375374543056, 0.09920705802758992, 0.1415090600653345, 0.09597789664069131, 0.06106766497801195, 0.14032187015082653, 0.008288053054498123]) grid =", "0.08036453739500136, 0.09747098994785518, 0.040818441056887325, 0.16796111771248814, 0.07628940657007711]) grid = transit(grid, 3, [11,", "sin(grid, 4, 3, 0.10154488887533689, 12.479110491961137) grid = magnitude(grid, 1, [7],", "10], [0.9817011300708863, 0.018298869929113594]) grid = sin(grid, 14, 8, -0.4693746108213766, -98.17810769380118)", "6, [8, 6, 5, 7, 4, 2], [0.39579476392315127, 0.3200094081197146, 0.06439062651950353,", "0, 2, 9, 4, 8], [0.06904450551777742, 0.12680650314665426, 0.1756104206123629, 0.013987480750913602, 0.1337935702206657,", "[1.0]) grid = prod(grid, 13, [6, 3, 7]) grid =", "0, 0.10854801586669052) grid = shift(grid, 8, 9, 2.766857264282361) grid =", "= transit(grid, 3, [7, 3, 12, 9], [0.13643904772292245, 0.38438336340747, 0.15936221296996333,", "= transit(grid, 12, [8, 11, 3], [0.2717231795161624, 0.38648847983305307, 0.3417883406507845]) grid", "4, 9, 13, 6], [0.3199750359220948, 0.07376266150860299, 0.03622483092076182, 0.09070212266434277, 0.4030414045204916, 0.07629394446370606])", "14, [14, 13, 15], [0.530662002197574, 0.1082014600047566, 0.36113653779766947]) grid = transit(grid,", "[0.005204838856346087, 0.5116602651328436, 0.48313489601081044]) grid = transit(grid, 10, [10], [1.0]) grid", "[0.30088974760959275, 0.6991102523904072]) grid = transit(grid, 8, [2, 11, 15, 4,", "sin(grid, 9, 4, 3.0281102269529683, 11.185401112275173) grid = sin(grid, 10, 4,", "0.0306940545567164, 0.19611241111174804, 0.7173417059926683]) grid = transit(grid, 0, [7, 1, 11,", "* scale + shift) return test_values(res) def magnitude(x, t_indx, s_indx,", "= sin(grid, 10, 4, 1.2844464834351186, -45.836492724169695) grid = sin(grid, 1,", "inverse(grid, 11, 5) grid = magnitude(grid, 14, [4, 6, 1,", "0.03422984110049058, 0.03597196960697647]) grid = magnitude(grid, 
13, [11, 7], 2) grid", "transit(grid, 15, [11, 4, 10], [0.6926745567135898, 0.1831142410590532, 0.12421120222735695]) grid =", "grid = transit(grid, 2, [2, 0, 11, 10, 5, 4,", "SIZE)).repeat(SIZE, 0) y = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((SIZE, 1)).repeat(SIZE,", "0.14540219911263502, 0.094441440303033]) grid = transit(grid, 11, [12], [1.0]) grid =", "11, [0, 9], [0.1290607634325389, 0.8709392365674611]) grid = transit(grid, 14, [14,", "Exception('Array has None elements!') if np.amin(arr) < -1 or np.amax(arr)", "6, [6, 13, 7], [0.16813621041531998, 0.42150135317124293, 0.410362436413437]) grid = inverse(grid,", "-0.7671554143072785) / 2 #apply transformations to the grid grid =", "1)).repeat(SIZE, 1) grid[:,:,0] = (x * 0.9386329219527516 + y *", "9, 0, 1], [0.05863158300898051, 0.3467981515651057, 0.262107802795733, 0.038001653167336905, 0.2112967596903696, 0.002128256606899112, 0.08103579316557531])", "y * 0.7746597063144072) / 2 grid[:,:,12] = (x * -0.5303146721156469", "prod(grid, 10, [5, 2]) grid = transit(grid, 15, [0, 3],", "grid = inverse(grid, 5, 5) grid = transit(grid, 4, [8,", "0.061606568183823145, 0.4832751235896067, 0.33341754503307897]) grid = transit(grid, 13, [10, 8, 9,", "12) grid = transit(grid, 1, [1, 14, 8], [0.38986786543390084, 0.40057743619803005,", "14, 9], [0.33493798319460544, 0.14040206011900094, 0.3010385316537353, 0.07412413198773361, 0.14949729304492473]) grid = magnitude(grid,", "6, 9], [0.1597221050818672, 0.523275926379751, 0.31700196853838186]) grid = sin(grid, 14, 7,", "[3, 13, 2, 9, 0], [0.24803411847529433, 0.2425397323068922, 0.0904752958055755, 0.11683555248582808, 0.30211530092641004])", "0, 1, 11], [0.01847979792505241, 0.33442336387003857, 0.15192425697494277, 0.4951725812299663]) grid = sin(grid,", "8, 5, 6, 7]) grid = transit(grid, 8, [3], [1.0])", "0.08706050582840198, 0.2243337708440404, 0.11415517296465624]) grid = sin(grid, 11, 13, -6.909579361872105, 70.84834564082374)", "grid = shift(grid, 11, 5, 
1.0526879494498724) grid = transit(grid, 1,", "3], [0.32356965941479515, 0.022696478437764827, 0.2132573540073865, 0.11957266769813353, 0.3209038404419199]) grid = transit(grid, 6,", "transit(grid, 3, [6, 14, 0, 3, 15, 4, 2, 11,", "15, 12) grid = prod(grid, 11, [3]) grid = smooth_max(grid,", "3, [14, 15]) grid = inverse(grid, 5, 5) grid =", "15, 10, 8, 5, 2, 3], [0.23701292672659616, 0.08316792464084911, 0.017867439461611043, 0.36417402420248035,", "= smooth_max(grid, 5, 8, 4) grid = transit(grid, 10, [1],", "date: 2021-11-28 09:20:21 UTC # GAS md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad #", "= sin(grid, 10, 3, -2.5681840787633137, -30.256455817944243) grid = sin(grid, 8,", "grid[:,:,2] = (x * 0.9804797761207309 + y * -0.5063344373124843) /", "+ y * 0.2914526739617249) / 2 grid[:,:,2] = (x *", "return test_values(res) def prod(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx]", "12, 2, 11, 14], 2) grid = transit(grid, 12, [8,", "15, [15], [1.0]) grid = prod(grid, 13, [6, 3, 7])", "9], [0.1290607634325389, 0.8709392365674611]) grid = transit(grid, 14, [14, 13, 15],", "0.23844705713314918]) grid = power(grid, 14, 0, 0.10854801586669052) grid = shift(grid,", "0.08103579316557531]) grid = shift(grid, 3, 3, 2.4622222565241207) grid = sin(grid,", "0.7746597063144072) / 2 grid[:,:,12] = (x * -0.5303146721156469 + y", "transit(grid, 4, [7, 6, 12, 8, 9, 0, 1], [0.05863158300898051,", "= transit(grid, 6, [15], [1.0]) grid = sin(grid, 15, 0,", "0.22866693087969006, 0.1812166509589377]) grid = sin(grid, 4, 2, -3.329894296119046, -76.41676919069447) grid", "-90.74050288622541) grid = sin(grid, 9, 8, -0.8743741598911887, 15.92872484723533) grid =", "smooth_min(grid, 1, 1, 11) grid = transit(grid, 5, [11, 4,", "[0], [1.0]) grid = magnitude(grid, 0, [4, 13], 2) grid", "1.6265187305620117, -97.13150019385894) grid = transit(grid, 11, [0, 9], [0.1290607634325389, 0.8709392365674611])", "4.147364704932215) grid = transit(grid, 4, [3], [1.0]) grid = sin(grid,", "= prod(grid, 11, [3]) 
grid = smooth_max(grid, 8, 11, 15)", "3.1584260780059252) grid = transit(grid, 10, [9, 8], [0.7777441717493406, 0.22225582825065934]) grid", "11.539889679902203) grid = power(grid, 10, 5, 0.12539493928522222) grid = power(grid,", "4, 5) grid = transit(grid, 1, [4, 14, 0, 13],", "0.16686217850764798, 0.09926467338066268]) grid = transit(grid, 6, [6, 13, 7], [0.16813621041531998,", "[15, 7], 2) grid = transit(grid, 4, [4, 12, 14,", "[3, 5, 0, 1]) grid = sin(grid, 6, 11, -0.7697482296056479,", "= (x * -0.5986715486203882 + y * 0.9515468928881716) / 2", "0.017334502896306194, 0.1522407828502505]) grid = prod(grid, 2, [8, 7, 11, 10,", "# Generation date: 2021-11-28 09:21:40 UTC # GAS change date:", "power(grid, 10, 5, 0.12539493928522222) grid = power(grid, 0, 12, 2.5526439221510495)", "2, [2, 0, 11, 10, 5, 4, 15, 13], [0.1869735689344564,", "grid = transit(grid, 15, [11, 4, 10], [0.6926745567135898, 0.1831142410590532, 0.12421120222735695])", "768 GRID_CHANNELS = 16 def test_values(arr): if np.isnan(arr).any(): raise Exception('Array", "0.004915230889640017, 0.30146401859790545, 0.4150710938376613]) grid = sin(grid, 3, 11, -6.496603906160505, -73.75617586359363)", "0.11683555248582808, 0.30211530092641004]) grid = sin(grid, 5, 2, -2.2972705471452146, -12.522748365129786) grid", "sin(grid, 3, 12, 6.470760426148978, -53.62090724330151) grid = sin(grid, 10, 10,", "= sin(grid, 9, 4, 3.0281102269529683, 11.185401112275173) grid = sin(grid, 10,", "x = j % 4 y = j // 4", "255).clip(0,255) #save results im = Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) + '.png') #save", "res[:,:,t_indx] = np.abs((1 - (x[:,:,s_indx] + 1) / 2) **", "grid = transit(grid, 12, [8, 11, 3], [0.2717231795161624, 0.38648847983305307, 0.3417883406507845])", "shift(grid, 3, 9, 3.0393348894939773) grid = shift(grid, 2, 4, 2.1961962516242517)", "+ 1) / 2 * 255).clip(0,255) #save results im =", "shift(grid, 2, 4, 2.1961962516242517) grid = prod(grid, 15, [3, 5,", "[5], [1.0]) grid = 
shift(grid, 9, 13, -5.367438086043798) grid =", "= transit(grid, 10, [1], [1.0]) grid = transit(grid, 15, [15],", "grid = sin(grid, 5, 10, -1.5052434957207308, 24.900059771988836) grid = sin(grid,", "= sin(grid, 6, 3, -0.1377650382373763, -96.34412250071645) grid = sin(grid, 7,", "= sin(grid, 1, 5, 0.6814927249849106, 30.75954926767548) grid = inverse(grid, 8,", "* -0.7671554143072785) / 2 #apply transformations to the grid grid", "4, 15, 9, 10], [0.10267794314653868, 0.019022820046952493, 0.061606568183823145, 0.4832751235896067, 0.33341754503307897]) grid", "shift(grid, 3, 3, 2.4622222565241207) grid = sin(grid, 10, 0, 0.5112825397666086,", "0.07191051851248272, 0.25942601829807205, 0.16834508849259286, 0.14540219911263502, 0.094441440303033]) grid = transit(grid, 11, [12],", "0.01906389787670339]) grid = sin(grid, 4, 7, 3.7705302330112063, 56.91558505626969) grid =", "4, -1.6398586072056767, 84.51374680259704) grid = sin(grid, 1, 1, -0.183401440709518, -88.40242580975152)", "11, 6, 1.576100090732909, -21.508000199215132) grid = shift(grid, 11, 5, 1.0526879494498724)", "6], [0.24973877983541862, 0.3378766591098989, 0.15974656746239488, 0.027776085211312595, 0.02330072841260748, 0.20156117996836745]) grid = smooth_min(grid,", "grid = transit(grid, 4, [4, 12, 14, 15, 7, 1],", "= 2): res = x.copy() res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis =", "1) / 2 * 255).clip(0,255) #save results im = Image.fromarray(np.uint8(res))", "15) grid = magnitude(grid, 9, [15, 7], 2) grid =", "15], 2) grid = transit(grid, 13, [6, 15, 11, 9,", "14, -1.45141083652418, -99.85812912291547) grid = transit(grid, 0, [4, 3, 8],", "sin(grid, 1, 5, 8.18216846853571, -6.729427492311089) grid = magnitude(grid, 11, [8,", "0.1415090600653345, 0.09597789664069131, 0.06106766497801195, 0.14032187015082653, 0.008288053054498123]) grid = prod(grid, 15, [12,", "15, 0, 5]) grid = transit(grid, 11, [7, 2, 3,", "/ 2 grid[:,:,3] = (x * -0.8484277738516293 + y *", "grid[:,:,9] = (x * 
-0.4075423366723827 + y * 0.5388833863473126) /", "13, [1, 2, 7, 5, 8, 9, 15], [0.085742434722219, 0.4119764535375412,", "grid = smooth_min(grid, 11, 10, 9) grid = sin(grid, 13,", "transit(grid, 2, [11, 7, 13], [0.3629247592109436, 0.10073172896374764, 0.5363435118253088]) grid =", "prod(grid, 3, [14, 15]) grid = inverse(grid, 5, 5) grid", "> 0: res[:,:,i] = (-np.abs(((x[:,:,i] + 1) / 2) **", "= prod(grid, 14, [13]) grid = sin(grid, 1, 12, -0.5111321725063378,", "grid = transit(grid, 11, [1, 15, 5, 0, 6, 12,", "https://github.com/volotat/GAS #import python libraries import os #OS version: default import", "grid = prod(grid, 4, [10, 0, 2, 4, 8, 5,", "-37.11795195118333) grid = sin(grid, 11, 7, -0.3409112713023047, 75.93313567333723) grid =", "-0.6644350461377522 + y * 0.1739322518414499) / 2 grid[:,:,5] = (x", "grid = sin(grid, 4, 7, 3.7705302330112063, 56.91558505626969) grid = sin(grid,", "[4, 3, 8], [0.23275058190778222, 0.49901982570530873, 0.2682295923869092]) grid = magnitude(grid, 8,", "10, 1, 13, 5, 0, 7, 8, 9, 12, 6,", "version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64", "s_indx): res = x.copy() res[:,:,t_indx] = -x[:,:,s_indx] return test_values(res) def", "if shift[i] > 0: res[:,:,i] = (-np.abs(((x[:,:,i] + 1) /", "0.1224495166624379, 0.2384660328868578, 0.3357862916746864, 0.2671958928603256]) grid = smooth_min(grid, 1, 1, 11)", "4, 10, 5], [0.5076634403621766, 0.003404332378773421, 0.04142944289977586, 0.4475027843592742]) grid = inverse(grid,", "4, [8, 4, 15, 9, 10], [0.10267794314653868, 0.019022820046952493, 0.061606568183823145, 0.4832751235896067,", "14], [0.10006330804326793, 0.03891760159161208, 0.005474465860804227, 0.12962618248625338, 0.03090992138168193, 0.016043163973997736, 0.13259375374543056, 0.09920705802758992, 0.1415090600653345,", "[0.207462236904601, 0.11516125867317799, 0.12240760599022518, 0.05066197369764289, 0.13869178538077429, 0.09948828746526778, 0.16686217850764798, 0.09926467338066268]) grid =", 
"[1.0]) grid = transit(grid, 11, [2], [1.0]) #create color space", "13, [10, 8, 9, 12, 2], [0.031587088727564654, 0.024264739611302585, 0.0306940545567164, 0.19611241111174804,", "4, SIZE * 4)) for j in range(GRID_CHANNELS): x =", "0.19611241111174804, 0.7173417059926683]) grid = transit(grid, 0, [7, 1, 11, 0,", "= transit(grid, 6, [9, 11, 2, 13], [0.381505247910628, 0.12073241493361198, 0.3454992433435407,", "[12, 6, 9], [0.1597221050818672, 0.523275926379751, 0.31700196853838186]) grid = sin(grid, 14,", "- 1) ** (1 / (1 + shift[i])) + 1)", "7, [12, 11, 13, 4], [0.1713900685471786, 0.14082681623065177, 0.19859698568682838, 0.4891861295353413]) grid", "+ 1) * 2 - 1 if shift < 0:", "= transit(grid, 15, [12, 0, 1, 11], [0.01847979792505241, 0.33442336387003857, 0.15192425697494277,", "grid = transit(grid, 10, [15, 8, 13, 2], [0.32464063956303774, 0.20922781529873477,", "power(grid, 6, 5, 0.9223892145169746) grid = transit(grid, 2, [9, 11,", "(tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]", "+ 1) / 2) ** (1 - shift) - 1)", "def shift(x, t_indx, s_indx, shift): res = x.copy() if shift", "13, 3.6938747278005737, 76.37702042567852) grid = magnitude(grid, 15, [5, 3, 8,", "grid = sin(grid, 2, 5, -5.225820110717917, 57.71107021356826) grid = transit(grid,", "20.43114218394348) grid = transit(grid, 8, [1], [1.0]) grid = sin(grid,", "14, 4, 9, 13, 6], [0.3199750359220948, 0.07376266150860299, 0.03622483092076182, 0.09070212266434277, 0.4030414045204916,", "10, 2, -0.010214061334835559, 20.43114218394348) grid = transit(grid, 8, [1], [1.0])", "((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((1, SIZE)).repeat(SIZE, 0) y = ((np.arange(SIZE)/(SIZE-1)", "s1_indx, s2_indx, p = 10): res = x.copy() res[:,:,t_indx] =", "9, 0.2366252211469413, -40.63773874328931) grid = sin(grid, 9, 15, -2.507870105026106, -89.43842740853354)", "2.1961962516242517) grid = prod(grid, 15, [3, 5, 0, 1]) grid", "= smooth_min(grid, 7, 4, 13) grid = magnitude(grid, 5, [7],", "[2], 
[1.0]) #create color space def shift_colors(x, shift): res =", "10, 5, 4, 15, 13], [0.1869735689344564, 0.06343641920215143, 0.038951322931441136, 0.04613309733662021, 0.19750663742298355,", "grid = transit(grid, 1, [12, 8, 10, 4, 2], [0.43102537693091664,", "= sin(grid, 9, 9, -4.261918262131112, 18.680580924548693) grid = smooth_max(grid, 2,", "12]) grid = transit(grid, 7, [15, 6, 2, 7], [0.45073658968521574,", "0.29583940098242745, 0.2547330105267852]) grid = inverse(grid, 11, 5) grid = magnitude(grid,", "#set initial params SIZE = 768 GRID_CHANNELS = 16 def", "13, 12, 0, 8], [0.08486049729383285, 0.15069099224942706, 0.024923245737924458, 0.07191051851248272, 0.25942601829807205, 0.16834508849259286,", "transit(grid, 11, [5, 10, 7], [0.22694849313985146, 0.5162695719847235, 0.25678193487542517]) grid =", "+ y * -0.5063344373124843) / 2 grid[:,:,3] = (x *", "6, 11], [0.036102265915692405, 0.1224495166624379, 0.2384660328868578, 0.3357862916746864, 0.2671958928603256]) grid = smooth_min(grid,", "= (x * 0.8090860808441245 + y * 0.2914526739617249) / 2", "* -0.45147169454413794) / 2 grid[:,:,1] = (x * 0.8090860808441245 +", "2.766857264282361) grid = transit(grid, 3, [6, 14, 0, 3, 15,", "0.07629394446370606]) grid = magnitude(grid, 13, [7, 4, 15], 2) grid", "sin(grid, 0, 3, -3.561651028660104, 11.539889679902203) grid = power(grid, 10, 5,", "= sin(grid, 9, 5, -5.606152225672729, -35.928477282758536) grid = transit(grid, 0,", "9], [0.33493798319460544, 0.14040206011900094, 0.3010385316537353, 0.07412413198773361, 0.14949729304492473]) grid = magnitude(grid, 10,", "sin(grid, 4, 5, -1.8457292172108153, -53.43885199947502) grid = sin(grid, 10, 0,", "transit(grid, 13, [15, 5, 9, 4, 6, 12], [0.18067242214638962, 0.12939497982917472,", "= power(grid, 3, 5, 0.10200689258338674) grid = transit(grid, 2, [10,", "12], [0.18067242214638962, 0.12939497982917472, 0.08164480089591167, 0.24583958083442445, 0.2244518823086713, 0.13799633398542827]) grid = transit(grid,", "= sin(grid, 
12, 4, -1.6398586072056767, 84.51374680259704) grid = sin(grid, 1,", "(x * -0.5986715486203882 + y * 0.9515468928881716) / 2 grid[:,:,6]", "grid = transit(grid, 8, [5, 4, 15, 6, 14, 0,", "in range(GRID_CHANNELS): x = j % 4 y = j", "14], [0.36336761526831185, 0.17372789204937897, 0.08036453739500136, 0.09747098994785518, 0.040818441056887325, 0.16796111771248814, 0.07628940657007711]) grid =", "(1 + shift[i]) - 1) ** (1 / (1 +", "grid = shift(grid, 2, 4, 2.1961962516242517) grid = prod(grid, 15,", "0, [7, 11, 15, 8, 12, 0, 4, 14, 3,", "= power(grid, 14, 0, 0.10854801586669052) grid = shift(grid, 8, 9,", "3, 12, -4.078686662791614, 24.459526349523884) grid = inverse(grid, 15, 10) grid", "color space def shift_colors(x, shift): res = x.copy() for i", "return test_values(res) def power(x, t_indx, s_indx, p = 1): res", "shift(grid, 6, 1, -1.115193397983063) grid = smooth_max(grid, 13, 3, 8)", "grid = np.zeros((SIZE, SIZE, GRID_CHANNELS)) x = ((np.arange(SIZE)/(SIZE-1) - 0.5)", "12, 3, 6], [0.1020239434902293, 0.05405846145210329, 0.11525379082942891, 0.11556721863292163, 0.12372657123165616, 0.1356897031789931, 0.20047556686480725,", "15, 6, 14, 0, 3, 11], [0.13835365002720226, 0.008781149737259792, 0.24627334258742545, 0.04870190081124998,", "[15], [1.0]) grid = sin(grid, 15, 0, -0.033265790773207085, 51.94880270063618) grid", "6, 15, 4.115946851379848) grid = transit(grid, 15, [13, 3], [0.5897775709748927,", "15, 0, -0.033265790773207085, 51.94880270063618) grid = smooth_min(grid, 13, 10, 15)", "4 y = j // 4 img[x*SIZE:(x + 1)*SIZE, y*SIZE:(y+1)*SIZE]", "6, 3, -0.1377650382373763, -96.34412250071645) grid = sin(grid, 7, 3, 1.6405444007982959,", "= prod(grid, 9, [10, 11, 8, 15, 0, 12, 3])", "program was generated by \"Generative Art Synthesizer\" # Generation date:", "0.12539493928522222) grid = power(grid, 0, 12, 2.5526439221510495) grid = sin(grid,", "9, [15, 7], 2) grid = transit(grid, 4, [4, 12,", "= x.copy() res[:,:,t_indx] = np.sign(x[:,:,s_indx]) * 
np.abs(x[:,:,s_indx]) ** p return", "0.14757838472737334]) grid = transit(grid, 2, [1, 7], [0.18247956114317448, 0.8175204388568255]) grid", "13, 10, 15) grid = transit(grid, 1, [12, 8, 10,", "/ 2 grid[:,:,13] = (x * -0.5864100240508576 + y *", "0.5363435118253088]) grid = sin(grid, 1, 5, 0.6814927249849106, 30.75954926767548) grid =", "[0, 1, 2, 14]) grid = prod(grid, 9, [10, 11,", "[12], [1.0]) grid = prod(grid, 14, [11, 10]) grid =", "= transit(grid, 3, [9], [1.0]) grid = transit(grid, 11, [2],", "15, 10) grid = shift(grid, 6, 1, -1.115193397983063) grid =", "5, 0.053526366336325744, 4.147364704932215) grid = transit(grid, 4, [3], [1.0]) grid", "5, [7], 2) grid = transit(grid, 6, [9, 11, 2,", "-98.17810769380118) grid = sin(grid, 12, 10, 3.6427863324838423, 99.297524709649) grid =", "0.10819642722960272, 0.15371289739415475, 0.25812192912399506, 0.005727171643985687, 0.14633649245899077, 0.033890406689391105, 0.05550396325806974, 0.1242259093715456]) grid =", "grid = transit(grid, 8, [11, 15, 0], [0.08195235243098883, 0.6796005904358621, 0.23844705713314918])", "8, 2, 3.501615294498545, -75.50049353340206) grid = prod(grid, 9, [1, 4,", "0.2591612025511646]) grid = transit(grid, 10, [11, 4, 2, 8, 14],", "= res / 1 res = ((res + 1) /", "grid = transit(grid, 10, [10], [1.0]) grid = transit(grid, 1,", "13, -5.367438086043798) grid = magnitude(grid, 13, [2, 0], 2) grid", "grid = shift(grid, 13, 3, 5.677279514103952) grid = transit(grid, 3,", "0.1082014600047566, 0.36113653779766947]) grid = transit(grid, 14, [10, 14, 4, 9,", "1]) grid = smooth_min(grid, 3, 2, 7) grid = smooth_max(grid,", "to far! 
[ %.2f : %.2f ]'%(np.amin(arr), np.amax(arr)) ) return", "4, 13]) grid = transit(grid, 5, [1, 9, 3, 10,", "was generated by \"Generative Art Synthesizer\" # Generation date: 2021-11-28", "13, 9, 8, 5, 2, 12], [0.05731677054419865, 0.08527765171582982, 0.33929504571762287, 0.1932983536368378,", "0.41022242902510725]) grid = sin(grid, 12, 14, 1.097917736937588, 58.87772371184383) grid =", "res = x.copy() for i in range(x.shape[-1]): if shift[i] >", "inverse(grid, 8, 7) grid = prod(grid, 10, [5, 2]) grid", "-0.4075423366723827 + y * 0.5388833863473126) / 2 grid[:,:,10] = (x", "9, 8, -1.1449289879251126) grid = transit(grid, 7, [4, 10, 1,", "10, [7, 15, 5], 2) grid = magnitude(grid, 9, [12,", "+ y * -0.5135707069423852) / 2 grid[:,:,9] = (x *", "0.48313489601081044]) grid = transit(grid, 10, [10], [1.0]) grid = transit(grid,", "2, [11, 7, 13], [0.3629247592109436, 0.10073172896374764, 0.5363435118253088]) grid = sin(grid,", "- 0.5) * 2).reshape((1, SIZE)).repeat(SIZE, 0) y = ((np.arange(SIZE)/(SIZE-1) -", "8, 9, 15], [0.085742434722219, 0.4119764535375412, 0.08377067725345017, 0.13045782410775286, 0.02917564277599849, 0.12489006625007311, 0.13398690135296518])", "= transit(grid, 8, [9, 10, 2, 15, 13], [0.3265190472987195, 0.21568397721657098,", "transit(grid, 10, [11, 4, 2, 8, 14], [0.3705316303566195, 0.1755951969700656, 0.043989590834687294,", "3, 2.634465399239887, 62.07538440217337) grid = sin(grid, 7, 2, 3.41043792019894, 65.36615977552518)", "magnitude(grid, 0, [4, 13], 2) grid = transit(grid, 8, [5,", "= (-np.abs(((x[:,:,s_indx] + 1) / 2) ** (1 + shift)", "sin(grid, 14, 5, 0.053526366336325744, 4.147364704932215) grid = transit(grid, 4, [3],", "[11, 4, 2, 1, 13, 12, 0, 8], [0.08486049729383285, 0.15069099224942706,", "20.537776250912316) grid = transit(grid, 7, [11, 9, 2], [0.5001532946669459, 0.42070604285213226,", "-76.78247379244436) grid = sin(grid, 2, 5, -5.225820110717917, 57.71107021356826) grid =", "* -0.9425245660964123) / 2 grid[:,:,14] = (x * 
-0.7665883618456049 +", "[15, 11, 2, 8, 0], [0.28772794692354614, 0.1935939805514465, 0.06024872230823076, 0.13457223936247906, 0.32385711085429764])", "[0.24075568684771534, 0.02527375632067568, 0.4828116495090197, 0.09546712897709621, 0.15569177834549294]) grid = sin(grid, 6, 3,", "0.21657812527174225]) grid = transit(grid, 3, [7, 3, 12, 9], [0.13643904772292245,", "1, [8, 10, 15, 14, 9], [0.33493798319460544, 0.14040206011900094, 0.3010385316537353, 0.07412413198773361,", "12, [3, 13, 2, 9, 0], [0.24803411847529433, 0.2425397323068922, 0.0904752958055755, 0.11683555248582808,", "0.30146401859790545, 0.4150710938376613]) grid = sin(grid, 3, 11, -6.496603906160505, -73.75617586359363) grid", "transit(grid, 2, [0, 15, 10], [0.005204838856346087, 0.5116602651328436, 0.48313489601081044]) grid =", "grid = prod(grid, 5, [3, 9, 2]) grid = sin(grid,", "= transit(grid, 10, [5, 11, 15, 8, 2, 13, 12,", "0, 3, -3.561651028660104, 11.539889679902203) grid = power(grid, 10, 5, 0.12539493928522222)", "GAS change date: 2021-11-28 09:20:21 UTC # GAS md5 hash:", "grid = transit(grid, 6, [6, 14], [0.7201753385758813, 0.2798246614241187]) grid =", "0.5071121900678415, 10.950101187785563) grid = shift(grid, 13, 3, 5.677279514103952) grid =", "13, [7, 4, 15], 2) grid = transit(grid, 13, [6,", "prod(grid, 2, [3, 11, 1]) grid = smooth_min(grid, 3, 2,", "0.21618454566402304, 0.046743011673522995, 0.05171405775355483]) grid = sin(grid, 10, 10, 0.9558311639914843, -47.618914508652054)", "10, -1.5052434957207308, 24.900059771988836) grid = sin(grid, 8, 10, 2.5947698108630664, -90.74050288622541)", "1, 7, 1.6265187305620117, -97.13150019385894) grid = transit(grid, 11, [0, 9],", "5, 2, 12], [0.05731677054419865, 0.08527765171582982, 0.33929504571762287, 0.1932983536368378, 0.0036374435750729187, 0.12289545051895708, 0.19827928429148084])", "13, [11, 0], [0.6569516962992897, 0.3430483037007103]) grid = sin(grid, 14, 5,", "default import numpy as np #Numpy version: 1.19.5 from PIL", 
"prod(grid, 3, [2, 6, 10, 7, 4]) grid = smooth_min(grid,", "sin(grid, 1, 5, 0.6814927249849106, 30.75954926767548) grid = inverse(grid, 8, 7)", "0.14032187015082653, 0.008288053054498123]) grid = prod(grid, 15, [12, 15]) grid =", "2.0751861425380627, 63.37681521624819) grid = smooth_min(grid, 11, 10, 9) grid =", "shift(grid, 6, 15, 4.115946851379848) grid = transit(grid, 15, [13, 3],", "-2.5681840787633137, -30.256455817944243) grid = sin(grid, 8, 2, 3.501615294498545, -75.50049353340206) grid", "prod(grid, 4, [10, 0, 2, 4, 8, 5, 6, 7])", "14, 5, 0.053526366336325744, 4.147364704932215) grid = transit(grid, 4, [3], [1.0])", "11, 9, 12], [0.21908823570589997, 0.1636179110868493, 0.03797238284324163, 0.29532957711092916, 0.2839918932530799]) grid =", "10, [9, 8], [0.7777441717493406, 0.22225582825065934]) grid = transit(grid, 3, [9],", "transit(grid, 7, [12, 11, 13, 4], [0.1713900685471786, 0.14082681623065177, 0.19859698568682838, 0.4891861295353413])", "9, [10, 11, 8, 15, 0, 12, 3]) grid =", "= 1, shift = 0): res = x.copy() res[:,:,t_indx] =", "2 grid[:,:,13] = (x * -0.5864100240508576 + y * -0.9425245660964123)", "15, [0, 3], [0.29345909580747953, 0.7065409041925205]) grid = sin(grid, 12, 4,", "11, 5, 1.0526879494498724) grid = transit(grid, 1, [14], [1.0]) grid", "/ 2 grid[:,:,7] = (x * 0.5049774961793401 + y *", "sin(grid, 10, 10, 0.7827958631857042, -90.82177259964699) grid = transit(grid, 6, [8,", "grid = transit(grid, 15, [7, 3], [0.9172074355564371, 0.08279256444356292]) grid =", "= transit(grid, 8, [3], [1.0]) grid = inverse(grid, 8, 5)", "2, 9, 0], [0.24803411847529433, 0.2425397323068922, 0.0904752958055755, 0.11683555248582808, 0.30211530092641004]) grid =", "6], [0.1020239434902293, 0.05405846145210329, 0.11525379082942891, 0.11556721863292163, 0.12372657123165616, 0.1356897031789931, 0.20047556686480725, 0.09921434949484752, 0.05399039482501285])", "-0.7665883618456049 + y * -0.3867357840809138) / 2 grid[:,:,15] = (x", "11, 7, 3, 8], 
[0.03500911832175082, 0.03265868671024263, 0.3248025339288217, 0.4234363710484886, 0.13338109758306646, 0.050712192407629864])", "15, 13], [0.3265190472987195, 0.21568397721657098, 0.06226802479442838, 0.0028158122366541832, 0.39271313845362693]) grid = magnitude(grid,", "= transit(grid, 7, [11, 9, 2], [0.5001532946669459, 0.42070604285213226, 0.07914066248092186]) grid", "prod(grid, 15, [12, 15]) grid = prod(grid, 8, [11, 7,", "[15, 8, 13, 2], [0.32464063956303774, 0.20922781529873477, 0.16179927966914437, 0.30433226546908315]) grid =", "7], [0.5203714128788618, 0.068511863728177, 0.10141059844877331, 0.2728285912351676, 0.036877533709020166]) grid = transit(grid, 7,", "transit(grid, 5, [9, 13, 3, 14], [0.28064413535886806, 0.5181512474389621, 0.1504742947642479, 0.050730322437922])", "sin(grid, 10, 9, 6.219381309190064, -71.03631884776823) grid = sin(grid, 9, 6,", "* 2 - 1 return test_values(res) def inverse(x, t_indx, s_indx):", "= sin(grid, 10, 0, 7.741409383532979, -12.082110529508299) grid = prod(grid, 11,", "0.027776085211312595, 0.02330072841260748, 0.20156117996836745]) grid = smooth_min(grid, 0, 5, 1) grid", "sin(grid, 6, 3, -0.1377650382373763, -96.34412250071645) grid = sin(grid, 7, 3,", "raise Exception('Values went to far! 
[ %.2f : %.2f ]'%(np.amin(arr),", "grid = power(grid, 0, 12, 2.5526439221510495) grid = sin(grid, 4,", "15, 2, 10, 14], [0.20381942291270427, 0.07753380798970702, 0.11445683149439734, 0.08475226158626031, 0.1416941580568898, 0.020968563089492034,", "transit(grid, 11, [7, 2, 3, 9, 5], [0.24039798004748805, 0.2886075990223525, 0.18742374307846998,", "transit(grid, 3, [9], [1.0]) grid = transit(grid, 11, [2], [1.0])", "(x[:,:,s_indx] + 1) / 2) ** (1 - shift) -", "0.40057743619803005, 0.20955469836806906]) grid = transit(grid, 9, [5], [1.0]) grid =", "12, 9, 11) grid = sin(grid, 4, 15, -1.9527829039221054, 20.537776250912316)", "0, 1) grid = sin(grid, 9, 4, 3.0281102269529683, 11.185401112275173) grid", "6, [15, 8], [0.5303803951305812, 0.4696196048694189]) grid = inverse(grid, 0, 0)", "0.13398690135296518]) grid = transit(grid, 2, [2, 0, 11, 10, 5,", "0.25678193487542517]) grid = sin(grid, 9, 9, -4.261918262131112, 18.680580924548693) grid =", "12, 13, 3.6938747278005737, 76.37702042567852) grid = magnitude(grid, 15, [5, 3,", "/ 2) ** (1 - shift[i]) - 1) ** (1", "0.020968563089492034, 0.0847896752697893, 0.0921589665387646, 0.008240731277180186, 0.17158558178481512]) grid = transit(grid, 5, [11,", "0.04938629068404894, 0.08457069101219464, 0.014801187461296406, 0.3649334871683411, 0.28062233683539095, 0.08637063851194285, 0.06076815802338077, 0.022574848472165728]) grid =", "+ shift)) + 1) * 2 - 1 if shift", "= (x * -0.5864100240508576 + y * -0.9425245660964123) / 2", "8, 12) grid = transit(grid, 1, [1, 14, 8], [0.38986786543390084,", "test_values(res) def power(x, t_indx, s_indx, p = 1): res =", "grid = magnitude(grid, 10, [11, 0, 5], 2) grid =", "= transit(grid, 7, [12, 11, 13, 4], [0.1713900685471786, 0.14082681623065177, 0.19859698568682838,", "8], [0.38986786543390084, 0.40057743619803005, 0.20955469836806906]) grid = transit(grid, 9, [5], [1.0])", "13, [11, 7], 2) grid = sin(grid, 4, 8, 4.28026157040775,", "* 0.9386329219527516 + y * -0.45147169454413794) / 2 
grid[:,:,1] =", "transit(grid, 13, [11, 0], [0.6569516962992897, 0.3430483037007103]) grid = sin(grid, 14,", "ord = 2): res = x.copy() res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis", "sin(grid, 1, 12, -0.5111321725063378, 18.261359970959475) grid = power(grid, 6, 5,", "10, 4, 2], [0.43102537693091664, 0.25433300797798253, 0.21618454566402304, 0.046743011673522995, 0.05171405775355483]) grid =", "[6, 14, 0, 3, 15, 4, 2, 11, 13], [0.03597236183123865,", "sin(grid, 12, 7, 1.439019575760617, 13.126437741104823) grid = transit(grid, 10, [15,", "10, 6) grid = prod(grid, 3, [2, 6, 10, 7,", "grid = inverse(grid, 4, 5) grid = transit(grid, 1, [4,", "= sin(grid, 12, 7, 1.439019575760617, 13.126437741104823) grid = transit(grid, 10,", "= smooth_min(grid, 0, 13, 15) grid = smooth_max(grid, 5, 8,", "= (x * 0.2265055481768512 + y * 0.4365452266748293) / 2", "11, -6.496603906160505, -73.75617586359363) grid = transit(grid, 6, [6, 14], [0.7201753385758813,", "8, 14], [0.3705316303566195, 0.1755951969700656, 0.043989590834687294, 0.22866693087969006, 0.1812166509589377]) grid = sin(grid,", "/ 2 grid[:,:,6] = (x * 0.2265055481768512 + y *", "grid = transit(grid, 8, [9, 3], [0.30088974760959275, 0.6991102523904072]) grid =", "15, [7, 3], [0.9172074355564371, 0.08279256444356292]) grid = transit(grid, 13, [1,", "[9, 3], [0.30088974760959275, 0.6991102523904072]) grid = transit(grid, 8, [2, 11,", "11) grid = sin(grid, 4, 15, -1.9527829039221054, 20.537776250912316) grid =", "11) grid = sin(grid, 13, 13, 7.718114740496995, 55.242200715207815) grid =", "8, 4.28026157040775, -75.14180284322572) grid = prod(grid, 3, [14, 15]) grid", "0.341923243761946, 0.0494872820880747, 0.29583940098242745, 0.2547330105267852]) grid = inverse(grid, 11, 5) grid", "(x * 0.9386329219527516 + y * -0.45147169454413794) / 2 grid[:,:,1]", "0.7371576932264431, 0.25224450337702853]) grid = sin(grid, 11, 8, 4.303514875116891, -67.11152580467314) grid", "1, 2.0751861425380627, 63.37681521624819) grid = 
smooth_min(grid, 11, 10, 9) grid", "grid = transit(grid, 14, [10, 14, 4, 9, 13, 6],", "= inverse(grid, 5, 12) grid = sin(grid, 10, 2, 0.9155140652310594,", "0.2886075990223525, 0.18742374307846998, 0.11615833154358073, 0.16741234630810867]) grid = prod(grid, 0, [0, 1,", "12, 9], [0.13643904772292245, 0.38438336340747, 0.15936221296996333, 0.31981537589964426]) grid = sin(grid, 10,", "[0.18067242214638962, 0.12939497982917472, 0.08164480089591167, 0.24583958083442445, 0.2244518823086713, 0.13799633398542827]) grid = transit(grid, 11,", "= sin(grid, 3, 11, -6.496603906160505, -73.75617586359363) grid = transit(grid, 6,", "15.92872484723533) grid = transit(grid, 4, [3, 13, 9, 8, 5,", "6, 1, 0], 2) grid = transit(grid, 13, [11, 0],", "1.0526879494498724) grid = transit(grid, 1, [14], [1.0]) grid = transit(grid,", "11, 13, 4], [0.1713900685471786, 0.14082681623065177, 0.19859698568682838, 0.4891861295353413]) grid = transit(grid,", "10, 3, -2.5681840787633137, -30.256455817944243) grid = sin(grid, 8, 2, 3.501615294498545,", "2) grid = sin(grid, 12, 7, 1.439019575760617, 13.126437741104823) grid =", "prod(grid, 15, [3, 5, 0, 1]) grid = sin(grid, 6,", "= transit(grid, 11, [0], [1.0]) grid = magnitude(grid, 0, [4,", "x = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((1, SIZE)).repeat(SIZE, 0) y", "= ord) / np.sqrt(len(s_indx)) return test_values(res) def shift(x, t_indx, s_indx,", "* 2).reshape((SIZE, 1)).repeat(SIZE, 1) grid[:,:,0] = (x * 0.9386329219527516 +", "4, 8], [0.06904450551777742, 0.12680650314665426, 0.1756104206123629, 0.013987480750913602, 0.1337935702206657, 0.39097327478734406, 0.08978424496428203]) grid", "transit(grid, 4, [3], [1.0]) grid = sin(grid, 3, 12, -4.078686662791614,", "0], [0.28772794692354614, 0.1935939805514465, 0.06024872230823076, 0.13457223936247906, 0.32385711085429764]) grid = transit(grid, 1,", "transit(grid, 0, [14, 3, 11, 10, 7], [0.5203714128788618, 0.068511863728177, 0.10141059844877331,", "0, 15], [0.036901331671075975, 
0.5054281720479712, 0.13288430351514774, 0.10820806749406277, 0.21657812527174225]) grid = transit(grid,", "9, 1.4275963527158242, -76.78247379244436) grid = sin(grid, 2, 5, -5.225820110717917, 57.71107021356826)", "%.2f ]'%(np.amin(arr), np.amax(arr)) ) return arr #define grid transformation methods", "-88.40242580975152) grid = transit(grid, 12, [3, 13, 2, 9, 0],", "0.1932983536368378, 0.0036374435750729187, 0.12289545051895708, 0.19827928429148084]) grid = transit(grid, 8, [13, 9,", "* 0.1739322518414499) / 2 grid[:,:,5] = (x * -0.5986715486203882 +", "transit(grid, 11, [1, 15, 5, 0, 6, 12, 2, 7,", "0) y = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((SIZE, 1)).repeat(SIZE, 1)", "magnitude(grid, 13, [8], 2) grid = transit(grid, 13, [15, 5,", "= transit(grid, 14, [10, 14, 4, 9, 13, 6], [0.3199750359220948,", "0.4891861295353413]) grid = transit(grid, 13, [12, 15, 9, 2, 0,", "transit(grid, 8, [13, 9, 5, 7, 14], [0.05801706264076675, 0.341923243761946, 0.0494872820880747,", "transit(grid, 15, [0, 3], [0.29345909580747953, 0.7065409041925205]) grid = sin(grid, 12,", "= transit(grid, 15, [13, 3], [0.5897775709748927, 0.41022242902510725]) grid = sin(grid,", "* -0.5135707069423852) / 2 grid[:,:,9] = (x * -0.4075423366723827 +", "7, [11], [1.0]) grid = transit(grid, 5, [9, 13, 3,", "0.20156117996836745]) grid = smooth_min(grid, 0, 5, 1) grid = magnitude(grid,", "0.15123046752435387, 0.31255198044446264, 0.04415702829077187]) grid = transit(grid, 1, [3], [1.0]) grid", "[2, 6, 10, 7, 4]) grid = smooth_min(grid, 7, 12,", "11, 2, 8, 0], [0.28772794692354614, 0.1935939805514465, 0.06024872230823076, 0.13457223936247906, 0.32385711085429764]) grid", "+ np.exp(-x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return test_values(res)", "13, [13, 0, 5, 14], [0.09662806703796267, 0.1621478194912538, 0.21548762580464817, 0.5257364876661353]) grid", "0.08457069101219464, 0.014801187461296406, 0.3649334871683411, 0.28062233683539095, 0.08637063851194285, 0.06076815802338077, 
0.022574848472165728]) grid = transit(grid,", "grid = sin(grid, 10, 10, 0.9558311639914843, -47.618914508652054) grid = shift(grid,", "1, -0.183401440709518, -88.40242580975152) grid = transit(grid, 12, [3, 13, 2,", "12) grid = sin(grid, 10, 2, 0.9155140652310594, -34.1653400637653) grid =", "0.019022820046952493, 0.061606568183823145, 0.4832751235896067, 0.33341754503307897]) grid = transit(grid, 13, [10, 8,", "4, [4, 12, 14, 15, 7, 1], [0.20378471182464508, 0.038241020379710625, 0.16903312106740406,", "+ shift[i])) + 1) * 2 - 1 if shift[i]", "0: res[:,:,t_indx] = np.abs((1 - (x[:,:,s_indx] + 1) / 2)", "[14], [1.0]) grid = transit(grid, 4, [1, 12, 15, 13,", "-4.078686662791614, 24.459526349523884) grid = inverse(grid, 15, 10) grid = shift(grid,", "2]) grid = sin(grid, 5, 1, 2.0751861425380627, 63.37681521624819) grid =", "2, [10, 11, 4, 15, 0, 6], [0.24973877983541862, 0.3378766591098989, 0.15974656746239488,", "[10, 14, 4, 9, 13, 6], [0.3199750359220948, 0.07376266150860299, 0.03622483092076182, 0.09070212266434277,", "8) grid = sin(grid, 4, 4, 3.47544933993972, -37.11795195118333) grid =", "or np.amax(arr) > 1: raise Exception('Values went to far! 
[", "/ 2 grid[:,:,1] = (x * 0.8090860808441245 + y *", "transit(grid, 6, [1, 7, 0, 2, 9, 4, 8], [0.06904450551777742,", "transit(grid, 13, [1, 2, 7, 5, 8, 9, 15], [0.085742434722219,", "[13, 0, 5, 14], [0.09662806703796267, 0.1621478194912538, 0.21548762580464817, 0.5257364876661353]) grid =", "6, [6, 14], [0.7201753385758813, 0.2798246614241187]) grid = prod(grid, 4, [10,", "0.31700196853838186]) grid = sin(grid, 14, 7, 5.409920766787869, -58.09956716630187) grid =", "[0.39579476392315127, 0.3200094081197146, 0.06439062651950353, 0.03284446726347166, 0.04732779189481446, 0.13963294227934445]) grid = smooth_min(grid, 0,", "grid = smooth_max(grid, 5, 8, 4) grid = transit(grid, 10,", "<reponame>lapaniku/GAS<gh_stars>10-100 # This program was generated by \"Generative Art Synthesizer\"", "0.06024872230823076, 0.13457223936247906, 0.32385711085429764]) grid = transit(grid, 1, [7, 2, 6,", "transit(grid, 7, [11, 9, 2], [0.5001532946669459, 0.42070604285213226, 0.07914066248092186]) grid =", "[3], [1.0]) grid = sin(grid, 3, 12, -4.078686662791614, 24.459526349523884) grid", "10.950101187785563) grid = shift(grid, 13, 3, 5.677279514103952) grid = transit(grid,", "5.677279514103952) grid = transit(grid, 3, [15, 11, 2, 8, 0],", "5, 2, 3], [0.23701292672659616, 0.08316792464084911, 0.017867439461611043, 0.36417402420248035, 0.02841485585755143, 0.19916101840344472, 0.03422984110049058,", "grid = smooth_min(grid, 0, 5, 1) grid = magnitude(grid, 0,", "transit(grid, 13, [10, 8, 9, 12, 2], [0.031587088727564654, 0.024264739611302585, 0.0306940545567164,", "0.9155140652310594, -34.1653400637653) grid = transit(grid, 8, [14], [1.0]) grid =", "0.02527375632067568, 0.4828116495090197, 0.09546712897709621, 0.15569177834549294]) grid = sin(grid, 6, 3, -0.1377650382373763,", "sin(grid, 1, 2, -1.5301674594368837, -60.29431568717391) grid = transit(grid, 2, [13,", "[13, 9, 5, 7, 14], [0.05801706264076675, 0.341923243761946, 0.0494872820880747, 0.29583940098242745, 0.2547330105267852])", 
"+ shift[i]) - 1) ** (1 / (1 + shift[i]))", "grid = sin(grid, 6, 11, -0.7697482296056479, 23.55348445076298) grid = sin(grid,", "grid = transit(grid, 1, [4, 14, 0, 13], [0.2785496566747933, 0.004915230889640017,", "2) grid = transit(grid, 6, [9, 11, 2, 13], [0.381505247910628,", "(-np.abs(((x[:,:,s_indx] + 1) / 2) ** (1 + shift) -", "+ y * 0.5388833863473126) / 2 grid[:,:,10] = (x *", "-2.2972705471452146, -12.522748365129786) grid = smooth_min(grid, 12, 9, 11) grid =", "0.5116602651328436, 0.48313489601081044]) grid = transit(grid, 10, [10], [1.0]) grid =", "1, shift = 0): res = x.copy() res[:,:,t_indx] = np.sin(x[:,:,s_indx]", "1, 0, 1) grid = sin(grid, 9, 4, 3.0281102269529683, 11.185401112275173)", "8, [5, 4, 15, 6, 14, 0, 3, 11], [0.13835365002720226,", "transit(grid, 2, [12], [1.0]) grid = prod(grid, 14, [11, 10])", "[11, 7, 13], [0.3629247592109436, 0.10073172896374764, 0.5363435118253088]) grid = sin(grid, 1,", "-45.01904701883333) grid = shift(grid, 5, 5, 3.1584260780059252) grid = transit(grid,", "t_indx, s_indx, alphas): res = x.copy() res[:,:,t_indx] = np.sum(x[:,:,s_indx] *", "0.09597789664069131, 0.06106766497801195, 0.14032187015082653, 0.008288053054498123]) grid = prod(grid, 15, [12, 15])", "\"Generative Art Synthesizer\" # Generation date: 2021-11-28 09:21:40 UTC #", "grid = sin(grid, 4, 8, 3.386521226555936, 60.95572898751007) grid = shift(grid,", "14, 7, 5.409920766787869, -58.09956716630187) grid = sin(grid, 2, 15, -2.5319898824657017,", "6], [0.3199750359220948, 0.07376266150860299, 0.03622483092076182, 0.09070212266434277, 0.4030414045204916, 0.07629394446370606]) grid = magnitude(grid,", "grid = transit(grid, 3, [6, 14, 0, 3, 15, 4,", "magnitude(grid, 11, [8, 2], 2) grid = transit(grid, 7, [12,", "grid = inverse(grid, 0, 0) grid = magnitude(grid, 13, [8],", "2.4622222565241207) grid = sin(grid, 10, 0, 0.5112825397666086, 37.95950546335726) grid =", "= x.copy() res[:,:,t_indx] = np.prod(x[:,:,s_indx], -1) return test_values(res) def 
power(x,", "[12], [1.0]) grid = power(grid, 3, 5, 0.10200689258338674) grid =", "= transit(grid, 8, [3, 15, 9, 6, 11], [0.036102265915692405, 0.1224495166624379,", "4.303514875116891, -67.11152580467314) grid = prod(grid, 5, [3, 9, 2]) grid", "0.038951322931441136, 0.04613309733662021, 0.19750663742298355, 0.16072124228620793, 0.15869932715876592, 0.14757838472737334]) grid = transit(grid, 2,", "[6, 2, 3, 15, 5, 7], [0.06492287400539203, 0.21223490901058306, 0.36311130408652753, 0.09994467226348329,", "[1.9355805467383669, 1.4677093499726706, 1.2451388311186942]) res = res / 1 res =", "0, 3, 11], [0.13835365002720226, 0.008781149737259792, 0.24627334258742545, 0.04870190081124998, 0.049950480577274, 0.15123046752435387, 0.31255198044446264,", "7, 4], [0.03047869593495055, 0.024092687676923453, 0.02665655056773558, 0.17667886361751853, 0.15211061797378253, 0.016462544099609754, 0.0072484377164178625, 0.4477791048998878,", "[1, 2, 7, 5, 8, 9, 15], [0.085742434722219, 0.4119764535375412, 0.08377067725345017,", "0.08527765171582982, 0.33929504571762287, 0.1932983536368378, 0.0036374435750729187, 0.12289545051895708, 0.19827928429148084]) grid = transit(grid, 8,", "= magnitude(grid, 11, [8, 2], 2) grid = transit(grid, 7,", "10) grid = transit(grid, 11, [9, 0, 11, 7, 3,", "elements!') if np.amin(arr) < -1 or np.amax(arr) > 1: raise", "3], [0.2717231795161624, 0.38648847983305307, 0.3417883406507845]) grid = transit(grid, 15, [7, 3],", "= np.log((np.exp(x[:,:,s1_indx] * p) + np.exp(x[:,:,s2_indx] * p)) ** (1/p))", "= transit(grid, 10, [10], [1.0]) grid = transit(grid, 1, [8,", "0.8709392365674611]) grid = transit(grid, 14, [14, 13, 15], [0.530662002197574, 0.1082014600047566,", "3, 15) grid = magnitude(grid, 9, [15, 7], 2) grid", "np.prod(x[:,:,s_indx], -1) return test_values(res) def power(x, t_indx, s_indx, p =", "[0, 3], [0.29345909580747953, 0.7065409041925205]) grid = sin(grid, 12, 4, -1.6398586072056767,", "14, 1.097917736937588, 58.87772371184383) grid = 
transit(grid, 11, [9, 11], [0.37033495928182997,", "5]) grid = transit(grid, 11, [7, 2, 3, 9, 5],", "= transit(grid, 7, [4, 10, 1, 13, 5, 0, 7,", "by \"Generative Art Synthesizer\" # Generation date: 2021-11-28 09:21:40 UTC", "5, 8, 4) grid = transit(grid, 10, [1], [1.0]) grid", "-0.5111321725063378, 18.261359970959475) grid = power(grid, 6, 5, 0.9223892145169746) grid =", "11, [13, 10, 12, 2, 11, 14], 2) grid =", "0.09070212266434277, 0.4030414045204916, 0.07629394446370606]) grid = magnitude(grid, 13, [7, 4, 15],", "= sin(grid, 11, 13, -6.909579361872105, 70.84834564082374) grid = transit(grid, 2,", "((res + 1) / 2 * 255).clip(0,255) #save results im", "grid[:,:,15] = (x * 0.49037959172682255 + y * -0.7671554143072785) /", "1) grid = magnitude(grid, 0, [5, 0], 2) grid =", "5, 2, -2.2972705471452146, -12.522748365129786) grid = smooth_min(grid, 12, 9, 11)", "-0.7697482296056479, 23.55348445076298) grid = sin(grid, 7, 7, 0.5492744322205282, 35.873568370773654) grid", "shift = 0): res = x.copy() res[:,:,t_indx] = np.sin(x[:,:,s_indx] *", "= transit(grid, 0, [12, 6, 4, 9, 1, 0, 14],", "grid = sin(grid, 7, 7, 0.5492744322205282, 35.873568370773654) grid = transit(grid,", "13, [8], 2) grid = transit(grid, 13, [15, 5, 9,", "SIZE, 3)) res += shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669, 1.4677093499726706, 1.2451388311186942]) res", "transformations to the grid grid = transit(grid, 4, [7, 6,", "grid = transit(grid, 13, [15, 5, 9, 4, 6, 12],", "0, 5], 2) grid = magnitude(grid, 9, [15, 3, 11,", "0.06076815802338077, 0.022574848472165728]) grid = transit(grid, 4, [11, 4, 15, 10,", "[0, 9], [0.1290607634325389, 0.8709392365674611]) grid = transit(grid, 14, [14, 13,", "0.11957266769813353, 0.3209038404419199]) grid = transit(grid, 6, [1, 7, 0, 2,", "0, 1], [0.05863158300898051, 0.3467981515651057, 0.262107802795733, 0.038001653167336905, 0.2112967596903696, 0.002128256606899112, 0.08103579316557531]) grid", "test_values(res.clip(-1,1)) def sin(x, t_indx, 
s_indx, scale = 1, shift =", "0.08279256444356292]) grid = transit(grid, 13, [1, 2, 7, 5, 8,", "grid = prod(grid, 11, [9]) grid = sin(grid, 4, 3,", "SIZE * 4)) for j in range(GRID_CHANNELS): x = j", "12, 7, 1.439019575760617, 13.126437741104823) grid = transit(grid, 10, [15, 8,", "0.09948828746526778, 0.16686217850764798, 0.09926467338066268]) grid = transit(grid, 6, [6, 13, 7],", "= transit(grid, 9, [5], [1.0]) grid = transit(grid, 15, [12,", "sin(grid, 13, 2, 4.295107938126156, 57.378601701270014) grid = sin(grid, 10, 2,", "5, [11, 10], [0.9817011300708863, 0.018298869929113594]) grid = sin(grid, 14, 8,", "[7, 2, 6, 1, 4, 0], [0.2070905138265326, 0.06562120796792839, 0.17355051228662716, 0.05514926535269553,", "2], [0.031587088727564654, 0.024264739611302585, 0.0306940545567164, 0.19611241111174804, 0.7173417059926683]) grid = transit(grid, 0,", "grid = transit(grid, 1, [7, 2, 6, 1, 4, 0],", "0.4477791048998878, 0.11849249751317383]) grid = transit(grid, 10, [5, 11, 15, 8,", "0, 0.5112825397666086, 37.95950546335726) grid = sin(grid, 12, 13, 3.6938747278005737, 76.37702042567852)", "= (x * -0.4075423366723827 + y * 0.5388833863473126) / 2", "0.04415702829077187]) grid = transit(grid, 1, [3], [1.0]) grid = magnitude(grid,", "57.378601701270014) grid = sin(grid, 10, 2, -0.010214061334835559, 20.43114218394348) grid =", "img = (img + 1) * 127.5 im = Image.fromarray(np.uint8(img))", "13, -6.909579361872105, 70.84834564082374) grid = transit(grid, 2, [11, 7, 13],", "[0.24039798004748805, 0.2886075990223525, 0.18742374307846998, 0.11615833154358073, 0.16741234630810867]) grid = prod(grid, 0, [0,", "11, [0], [1.0]) grid = magnitude(grid, 0, [4, 13], 2)", "15, 5, 0, 6, 12, 2, 7, 4], [0.03047869593495055, 0.024092687676923453,", "= transit(grid, 1, [1, 14, 8], [0.38986786543390084, 0.40057743619803005, 0.20955469836806906]) grid", "[15, 3, 11, 0, 14], 2) grid = sin(grid, 4,", "= transit(grid, 12, [7, 4, 10, 5], [0.5076634403621766, 0.003404332378773421, 
0.04142944289977586,", "0.5388833863473126) / 2 grid[:,:,10] = (x * -0.4262457935185371 + y", "= transit(grid, 2, [0, 4, 2], [0.010597803396528332, 0.7371576932264431, 0.25224450337702853]) grid", "0.05066197369764289, 0.13869178538077429, 0.09948828746526778, 0.16686217850764798, 0.09926467338066268]) grid = transit(grid, 6, [6,", "0.4876807801032959]) grid = transit(grid, 2, [7], [1.0]) grid = sin(grid,", "4, 0, 6]) grid = transit(grid, 8, [9, 3], [0.30088974760959275,", "9, 15], [0.085742434722219, 0.4119764535375412, 0.08377067725345017, 0.13045782410775286, 0.02917564277599849, 0.12489006625007311, 0.13398690135296518]) grid", "grid = power(grid, 14, 0, 0.10854801586669052) grid = shift(grid, 8,", "0, 4, 14, 3, 5], [0.11084510086381213, 0.003439701966452383, 0.10819642722960272, 0.15371289739415475, 0.25812192912399506,", "[13, 3], [0.5897775709748927, 0.41022242902510725]) grid = sin(grid, 12, 14, 1.097917736937588,", "1, 8) grid = sin(grid, 4, 4, 3.47544933993972, -37.11795195118333) grid", "= magnitude(grid, 10, [7, 15, 5], 2) grid = magnitude(grid,", "11, [9, 11], [0.37033495928182997, 0.6296650407181701]) grid = smooth_min(grid, 4, 1,", "[0.43102537693091664, 0.25433300797798253, 0.21618454566402304, 0.046743011673522995, 0.05171405775355483]) grid = sin(grid, 10, 10,", "/ 2 grid[:,:,2] = (x * 0.9804797761207309 + y *", "/ 1.07 return test_values(res) def prod(x, t_indx, s_indx): res =", "0.19916101840344472, 0.03422984110049058, 0.03597196960697647]) grid = magnitude(grid, 13, [11, 7], 2)", "/ (1 - shift)) * 2 - 1 return test_values(res)", "= transit(grid, 0, [7, 1, 11, 0, 15], [0.036901331671075975, 0.5054281720479712,", "smooth_min(grid, 3, 2, 7) grid = smooth_max(grid, 8, 10, 6)", "14], 2) grid = transit(grid, 12, [8, 11, 3], [0.2717231795161624,", "10, 3, 15) grid = magnitude(grid, 9, [15, 7], 2)", "7, 4, 13) grid = magnitude(grid, 5, [7], 2) grid", "grid = smooth_min(grid, 1, 1, 11) grid = transit(grid, 5,", "0.12073241493361198, 0.3454992433435407, 
0.15226309381221942]) grid = magnitude(grid, 10, [7, 15, 5],", "4)) for j in range(GRID_CHANNELS): x = j % 4", "0.38438336340747, 0.15936221296996333, 0.31981537589964426]) grid = sin(grid, 10, 3, -2.5681840787633137, -30.256455817944243)", "0.17355051228662716, 0.05514926535269553, 0.0829726599151083, 0.41561584065110807]) grid = transit(grid, 2, [0, 4,", "0.017867439461611043, 0.36417402420248035, 0.02841485585755143, 0.19916101840344472, 0.03422984110049058, 0.03597196960697647]) grid = magnitude(grid, 13,", "grid = sin(grid, 3, 9, 1.4275963527158242, -76.78247379244436) grid = sin(grid,", "13, 3, 14], [0.28064413535886806, 0.5181512474389621, 0.1504742947642479, 0.050730322437922]) grid = prod(grid,", "2 - 1 return test_values(res) res = np.zeros((SIZE, SIZE, 3))", "13, [12, 15, 9, 2, 0, 1, 5], [0.18796556626817826, 0.19260744772691155,", "% 4 y = j // 4 img[x*SIZE:(x + 1)*SIZE,", "shift[i]) - 1) ** (1 / (1 - shift[i])) *", "12, [8, 11, 3], [0.2717231795161624, 0.38648847983305307, 0.3417883406507845]) grid = transit(grid,", "transit(grid, 8, [9, 3], [0.30088974760959275, 0.6991102523904072]) grid = transit(grid, 8,", "< 0: res[:,:,i] = np.abs((1 - (x [:,:,i]+ 1) /", "[0.18796556626817826, 0.19260744772691155, 0.11226112831146452, 0.08161640805634696, 0.08706050582840198, 0.2243337708440404, 0.11415517296465624]) grid = sin(grid,", "4, 3.47544933993972, -37.11795195118333) grid = sin(grid, 11, 7, -0.3409112713023047, 75.93313567333723)", "4, [3, 13, 9, 8, 5, 2, 12], [0.05731677054419865, 0.08527765171582982,", "12, 2.5526439221510495) grid = sin(grid, 4, 10, -3.680544885171134, 30.633332441673872) grid", "6, [9, 11, 2, 13], [0.381505247910628, 0.12073241493361198, 0.3454992433435407, 0.15226309381221942]) grid", "* 0.5388833863473126) / 2 grid[:,:,10] = (x * -0.4262457935185371 +", "transit(grid, 0, [4, 3, 8], [0.23275058190778222, 0.49901982570530873, 0.2682295923869092]) grid =", "t_indx, s1_indx, s2_indx, p = 10): res = x.copy() res[:,:,t_indx]", 
"transit(grid, 8, [5, 4, 15, 6, 14, 0, 3, 11],", "(x * -0.6644350461377522 + y * 0.1739322518414499) / 2 grid[:,:,5]", "= transit(grid, 11, [9, 0, 11, 7, 3, 8], [0.03500911832175082,", "[0.32356965941479515, 0.022696478437764827, 0.2132573540073865, 0.11957266769813353, 0.3209038404419199]) grid = transit(grid, 6, [1,", "0.14040206011900094, 0.3010385316537353, 0.07412413198773361, 0.14949729304492473]) grid = magnitude(grid, 10, [11, 0,", "0.1504742947642479, 0.050730322437922]) grid = prod(grid, 1, [12, 13]) grid =", "[1], [1.0]) grid = transit(grid, 15, [15], [1.0]) grid =", "grid = shift(grid, 14, 2, 2.55681173849493) grid = sin(grid, 10,", "= inverse(grid, 15, 10) grid = shift(grid, 6, 1, -1.115193397983063)", "[5, 4, 15, 6, 14, 0, 3, 11], [0.13835365002720226, 0.008781149737259792,", "2).reshape((1, SIZE)).repeat(SIZE, 0) y = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((SIZE,", "res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx] * p) + np.exp(x[:,:,s2_indx] * p)) **", "4, 1.2844464834351186, -45.836492724169695) grid = sin(grid, 1, 2, -1.5301674594368837, -60.29431568717391)", "[11, 7], 2) grid = sin(grid, 4, 8, 4.28026157040775, -75.14180284322572)", "-67.11152580467314) grid = prod(grid, 5, [3, 9, 2]) grid =", "0, 1]) grid = sin(grid, 6, 11, -0.7697482296056479, 23.55348445076298) grid", "[0.13237609957996088, 0.22944646977966682, 0.6381774306403722]) grid = transit(grid, 6, [15], [1.0]) grid", "more information visit: https://github.com/volotat/GAS #import python libraries import os #OS", "This program was generated by \"Generative Art Synthesizer\" # Generation", "grid = transit(grid, 1, [1, 14, 8], [0.38986786543390084, 0.40057743619803005, 0.20955469836806906])", "if shift[i] < 0: res[:,:,i] = np.abs((1 - (x [:,:,i]+", "grid[:,:,8] = (x * -0.3391983246964396 + y * -0.5135707069423852) /", "0.3430483037007103]) grid = sin(grid, 14, 5, 0.053526366336325744, 4.147364704932215) grid =", "= np.zeros((SIZE, SIZE, 3)) res += 
shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669, 1.4677093499726706,", "0.12289545051895708, 0.19827928429148084]) grid = transit(grid, 8, [13, 9, 5, 7,", "13], [0.3629247592109436, 0.10073172896374764, 0.5363435118253088]) grid = sin(grid, 1, 5, 0.6814927249849106,", "= inverse(grid, 1, 0) grid = smooth_max(grid, 1, 15, 12)", "sin(grid, 11, 7, -0.3409112713023047, 75.93313567333723) grid = transit(grid, 11, [5,", "= shift(grid, 5, 5, 3.1584260780059252) grid = transit(grid, 10, [9,", "im.save(os.path.basename(__file__) + '.png') #save layers img = np.zeros((SIZE * 4,", "= prod(grid, 9, [1, 4, 0, 6]) grid = transit(grid,", "ad55481e87ca5a7e9a8e92cd336d1cad # Python version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18)", "grid = transit(grid, 2, [11, 7, 13], [0.3629247592109436, 0.10073172896374764, 0.5363435118253088])", "= transit(grid, 7, [11], [1.0]) grid = transit(grid, 5, [9,", "= transit(grid, 15, [0, 3], [0.29345909580747953, 0.7065409041925205]) grid = sin(grid,", "0.13963294227934445]) grid = smooth_min(grid, 0, 13, 15) grid = smooth_max(grid,", "version: 1.19.5 from PIL import Image #PIL version: 8.1.2 #set", "= magnitude(grid, 0, [4, 13], 2) grid = transit(grid, 8,", "grid = smooth_min(grid, 4, 1, 8) grid = sin(grid, 4,", "0.19827928429148084]) grid = transit(grid, 8, [13, 9, 5, 7, 14],", "-58.09956716630187) grid = sin(grid, 2, 15, -2.5319898824657017, -45.01904701883333) grid =", "grid = transit(grid, 13, [10, 8, 9, 12, 2], [0.031587088727564654,", "+ '.png') #save layers img = np.zeros((SIZE * 4, SIZE", "0.42150135317124293, 0.410362436413437]) grid = inverse(grid, 6, 6) grid = sin(grid,", "4, 2, 1, 13, 12, 0, 8], [0.08486049729383285, 0.15069099224942706, 0.024923245737924458,", "1, 11, 3, 8, 7], [0.207462236904601, 0.11516125867317799, 0.12240760599022518, 0.05066197369764289, 0.13869178538077429,", "grid = transit(grid, 15, [0, 3], [0.29345909580747953, 0.7065409041925205]) grid =", "grid = transit(grid, 4, [11, 4, 15, 10, 8, 
5,", "11, -0.7697482296056479, 23.55348445076298) grid = sin(grid, 7, 7, 0.5492744322205282, 35.873568370773654)", "0.2366252211469413, -40.63773874328931) grid = sin(grid, 9, 15, -2.507870105026106, -89.43842740853354) grid", "grid = transit(grid, 13, [1, 2, 7, 5, 8, 9,", "grid = prod(grid, 14, [11, 10]) grid = transit(grid, 2,", "9, [1, 4, 0, 6]) grid = transit(grid, 8, [9,", "grid = transit(grid, 13, [11, 0], [0.6569516962992897, 0.3430483037007103]) grid =", "6.470760426148978, -53.62090724330151) grid = sin(grid, 10, 10, 0.7827958631857042, -90.82177259964699) grid", "grid = transit(grid, 12, [7, 4, 10, 5], [0.5076634403621766, 0.003404332378773421,", "0.2265055481768512 + y * 0.4365452266748293) / 2 grid[:,:,7] = (x", "[0.031587088727564654, 0.024264739611302585, 0.0306940545567164, 0.19611241111174804, 0.7173417059926683]) grid = transit(grid, 0, [7,", "None elements!') if np.amin(arr) < -1 or np.amax(arr) > 1:", "0.12680650314665426, 0.1756104206123629, 0.013987480750913602, 0.1337935702206657, 0.39097327478734406, 0.08978424496428203]) grid = smooth_min(grid, 9,", "8, -1.1449289879251126) grid = transit(grid, 7, [4, 10, 1, 13,", "smooth_max(grid, 10, 3, 15) grid = magnitude(grid, 9, [15, 7],", "= transit(grid, 11, [0, 9], [0.1290607634325389, 0.8709392365674611]) grid = transit(grid,", "10, [11, 4, 2, 8, 14], [0.3705316303566195, 0.1755951969700656, 0.043989590834687294, 0.22866693087969006,", "grid = transit(grid, 9, [5], [1.0]) grid = shift(grid, 9,", "p return test_values(res) #set initial grid grid = np.zeros((SIZE, SIZE,", "* -0.41048419195488317) / 2 grid[:,:,13] = (x * -0.5864100240508576 +", "grid = inverse(grid, 7, 8) grid = smooth_max(grid, 10, 3,", "** (1 / (1 + shift[i])) + 1) * 2", "5, -1.8457292172108153, -53.43885199947502) grid = sin(grid, 10, 0, 7.741409383532979, -12.082110529508299)", "s_indx, scale = 1, shift = 0): res = x.copy()", "8, 1, -0.2952350240798842) grid = sin(grid, 11, 6, 1.576100090732909, -21.508000199215132)", 
"transit(grid, 10, [15, 8, 13, 2], [0.32464063956303774, 0.20922781529873477, 0.16179927966914437, 0.30433226546908315])", "grid = prod(grid, 6, [2, 4, 13]) grid = transit(grid,", "11, 8, 15, 0, 12, 3]) grid = transit(grid, 13,", "return test_values(res) def smooth_max(x, t_indx, s1_indx, s2_indx, p = 10):", "11, 3], [0.2717231795161624, 0.38648847983305307, 0.3417883406507845]) grid = transit(grid, 15, [7,", "sin(grid, 15, 0, -0.033265790773207085, 51.94880270063618) grid = smooth_min(grid, 13, 10,", "res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx] * p) + np.exp(-x[:,:,s2_indx] * p)) **", "0.5181512474389621, 0.1504742947642479, 0.050730322437922]) grid = prod(grid, 1, [12, 13]) grid", "13], [0.2785496566747933, 0.004915230889640017, 0.30146401859790545, 0.4150710938376613]) grid = sin(grid, 3, 11,", "0.22225582825065934]) grid = transit(grid, 3, [9], [1.0]) grid = transit(grid,", "transit(grid, 10, [10], [1.0]) grid = transit(grid, 1, [8, 10,", "3, 9, 3.0393348894939773) grid = shift(grid, 2, 4, 2.1961962516242517) grid", "7, -2.4657577404884132, 72.95418196004374) grid = transit(grid, 12, [7, 4, 10,", "power(grid, 0, 12, 2.5526439221510495) grid = sin(grid, 4, 10, -3.680544885171134,", "return test_values(res) res = np.zeros((SIZE, SIZE, 3)) res += shift_colors(grid[:,:,0:1].repeat(3,", "0.1522407828502505]) grid = prod(grid, 2, [8, 7, 11, 10, 15,", "0.1621478194912538, 0.21548762580464817, 0.5257364876661353]) grid = inverse(grid, 1, 0) grid =", "0.5049774961793401 + y * 0.05113255120007798) / 2 grid[:,:,8] = (x", "return test_values(res) #set initial grid grid = np.zeros((SIZE, SIZE, GRID_CHANNELS))", "= transit(grid, 12, [3, 13, 2, 9, 0], [0.24803411847529433, 0.2425397323068922,", "return test_values(res) def inverse(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx]", "2 grid[:,:,8] = (x * -0.3391983246964396 + y * -0.5135707069423852)", "[10, 8, 9, 12, 2], [0.031587088727564654, 0.024264739611302585, 0.0306940545567164, 0.19611241111174804, 
0.7173417059926683])", "[9, 13, 3, 14], [0.28064413535886806, 0.5181512474389621, 0.1504742947642479, 0.050730322437922]) grid =", "((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((SIZE, 1)).repeat(SIZE, 1) grid[:,:,0] = (x", "10, 5, 0.12539493928522222) grid = power(grid, 0, 12, 2.5526439221510495) grid", "6, 2, 7], [0.45073658968521574, 0.16060948991238613, 0.12949271785123345, 0.2591612025511646]) grid = transit(grid,", "grid = sin(grid, 13, 2, 4.295107938126156, 57.378601701270014) grid = sin(grid,", "0.15371289739415475, 0.25812192912399506, 0.005727171643985687, 0.14633649245899077, 0.033890406689391105, 0.05550396325806974, 0.1242259093715456]) grid = smooth_max(grid,", "2) grid = transit(grid, 13, [6, 15, 11, 9, 12],", "[7], 2) grid = smooth_min(grid, 7, 4, 13) grid =", "0.09994467226348329, 0.12833432959710458, 0.1314519110369097]) grid = transit(grid, 8, [6, 2], [0.6857167761482571,", "4, 8, 3.386521226555936, 60.95572898751007) grid = shift(grid, 14, 2, 2.55681173849493)", "4, 1, 0, 14], [0.29712982335534416, 0.2526657169525107, 0.08415696601637544, 0.18541009701166816, 0.011062110917544764, 0.017334502896306194,", "= inverse(grid, 8, 7) grid = prod(grid, 10, [5, 2])", "[0.2717231795161624, 0.38648847983305307, 0.3417883406507845]) grid = transit(grid, 15, [7, 3], [0.9172074355564371,", "i in range(x.shape[-1]): if shift[i] > 0: res[:,:,i] = (-np.abs(((x[:,:,i]", "1 if shift < 0: res[:,:,t_indx] = np.abs((1 - (x[:,:,s_indx]", "18:58:18) [MSC v.1900 64 bit (AMD64)] # For more information", "0.8435706697714382 + y * 0.7746597063144072) / 2 grid[:,:,12] = (x", "(1 / (1 + shift)) + 1) * 2 -", "2).reshape((SIZE, 1)).repeat(SIZE, 1) grid[:,:,0] = (x * 0.9386329219527516 + y", "5], 2) grid = magnitude(grid, 9, [12, 14, 4], 2)", "#OS version: default import numpy as np #Numpy version: 1.19.5", "-2.507870105026106, -89.43842740853354) grid = transit(grid, 0, [12, 6, 4, 9,", "5, 7, 4, 2], [0.39579476392315127, 0.3200094081197146, 0.06439062651950353, 
0.03284446726347166, 0.04732779189481446, 0.13963294227934445])", "= smooth_min(grid, 3, 2, 7) grid = smooth_max(grid, 8, 10,", "0.28062233683539095, 0.08637063851194285, 0.06076815802338077, 0.022574848472165728]) grid = transit(grid, 4, [11, 4,", "15, -4.9164570678736865, 86.15931416043557) grid = sin(grid, 1, 7, 1.6265187305620117, -97.13150019385894)", "13]) grid = sin(grid, 6, 14, -1.927951619591129, -65.3028706482776) grid =", "4, 13) grid = magnitude(grid, 5, [7], 2) grid =", "prod(grid, 5, [3, 9, 2]) grid = sin(grid, 5, 1,", "2], [0.32464063956303774, 0.20922781529873477, 0.16179927966914437, 0.30433226546908315]) grid = magnitude(grid, 6, [14,", "5, 0.9223892145169746) grid = transit(grid, 2, [9, 11, 10], [0.2662646690994658,", "0.1356897031789931, 0.20047556686480725, 0.09921434949484752, 0.05399039482501285]) grid = transit(grid, 9, [5], [1.0])", "(x * 0.2265055481768512 + y * 0.4365452266748293) / 2 grid[:,:,7]", "grid = sin(grid, 0, 3, -3.561651028660104, 11.539889679902203) grid = power(grid,", "= transit(grid, 1, [12, 8, 10, 4, 2], [0.43102537693091664, 0.25433300797798253,", "0.33442336387003857, 0.15192425697494277, 0.4951725812299663]) grid = sin(grid, 4, 8, 3.386521226555936, 60.95572898751007)", "[1], [1.0]) grid = sin(grid, 4, 9, 0.2366252211469413, -40.63773874328931) grid", "0.018298869929113594]) grid = sin(grid, 14, 8, -0.4693746108213766, -98.17810769380118) grid =", "grid grid = transit(grid, 4, [7, 6, 12, 8, 9,", "[2, 0, 11, 10, 5, 4, 15, 13], [0.1869735689344564, 0.06343641920215143,", "86.15931416043557) grid = sin(grid, 1, 7, 1.6265187305620117, -97.13150019385894) grid =", "1 res = ((res + 1) / 2 * 255).clip(0,255)", "# GAS md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad # Python version: 3.7.9 (tags/v3.7.9:13c94747c7,", "= transit(grid, 4, [4, 12, 14, 15, 7, 1], [0.20378471182464508,", "0.043989590834687294, 0.22866693087969006, 0.1812166509589377]) grid = sin(grid, 4, 2, -3.329894296119046, -76.41676919069447)", "-60.29431568717391) 
grid = transit(grid, 2, [13, 11, 5], [0.421270391024163, 0.5054038923567993,", "scale = 1, shift = 0): res = x.copy() res[:,:,t_indx]", "grid = magnitude(grid, 13, [2, 0], 2) grid = transit(grid,", "3, 8, 0, 15], 2) grid = prod(grid, 2, [3,", "0.6814927249849106, 30.75954926767548) grid = inverse(grid, 8, 7) grid = prod(grid,", "= grid[:,:,j] img = (img + 1) * 127.5 im", "= (x * -0.5303146721156469 + y * -0.41048419195488317) / 2", "0.05514926535269553, 0.0829726599151083, 0.41561584065110807]) grid = transit(grid, 2, [0, 4, 2],", "0.04870190081124998, 0.049950480577274, 0.15123046752435387, 0.31255198044446264, 0.04415702829077187]) grid = transit(grid, 1, [3],", "grid = sin(grid, 10, 10, 0.7827958631857042, -90.82177259964699) grid = transit(grid,", "-np.log((np.exp(-x[:,:,s1_indx] * p) + np.exp(-x[:,:,s2_indx] * p)) ** (1/p)) /", "8, 2, 13, 12, 3, 6], [0.1020239434902293, 0.05405846145210329, 0.11525379082942891, 0.11556721863292163,", "** (1/p)) / 1.07 return test_values(res) def smooth_min(x, t_indx, s1_indx,", "7, 0, 2, 9, 4, 8], [0.06904450551777742, 0.12680650314665426, 0.1756104206123629, 0.013987480750913602,", "0.0494872820880747, 0.29583940098242745, 0.2547330105267852]) grid = inverse(grid, 11, 5) grid =", "grid = smooth_max(grid, 1, 0, 1) grid = sin(grid, 9,", "1, 11], [0.01847979792505241, 0.33442336387003857, 0.15192425697494277, 0.4951725812299663]) grid = sin(grid, 4,", "shift < 0: res[:,:,t_indx] = np.abs((1 - (x[:,:,s_indx] + 1)", "res = x.copy() res[:,:,t_indx] = -x[:,:,s_indx] return test_values(res) def smooth_max(x,", "= transit(grid, 4, [7, 6, 12, 8, 9, 0, 1],", "1, 2, -1.5301674594368837, -60.29431568717391) grid = transit(grid, 2, [13, 11,", "grid = transit(grid, 3, [7, 3, 12, 9], [0.13643904772292245, 0.38438336340747,", "j % 4 y = j // 4 img[x*SIZE:(x +", "[8, 11, 3], [0.2717231795161624, 0.38648847983305307, 0.3417883406507845]) grid = transit(grid, 15,", "transit(grid, 11, [9, 11], [0.37033495928182997, 0.6296650407181701]) grid = 
smooth_min(grid, 4,", "[15, 8], [0.5303803951305812, 0.4696196048694189]) grid = inverse(grid, 0, 0) grid", "= transit(grid, 8, [9, 3], [0.30088974760959275, 0.6991102523904072]) grid = transit(grid,", "2, 4.295107938126156, 57.378601701270014) grid = sin(grid, 10, 2, -0.010214061334835559, 20.43114218394348)", "4, 7, 15], 2) grid = sin(grid, 12, 7, 1.439019575760617,", "= prod(grid, 10, [5, 2]) grid = transit(grid, 15, [0,", "shift) return test_values(res) def magnitude(x, t_indx, s_indx, ord = 2):", "prod(grid, 14, [11, 10]) grid = transit(grid, 2, [0, 15,", "= sin(grid, 13, 13, 7.718114740496995, 55.242200715207815) grid = sin(grid, 12,", "t_indx, s_indx, shift): res = x.copy() if shift > 0:", "(x * -0.5864100240508576 + y * -0.9425245660964123) / 2 grid[:,:,14]", "12, -0.5111321725063378, 18.261359970959475) grid = power(grid, 6, 5, 0.9223892145169746) grid", "13], [0.1869735689344564, 0.06343641920215143, 0.038951322931441136, 0.04613309733662021, 0.19750663742298355, 0.16072124228620793, 0.15869932715876592, 0.14757838472737334]) grid", "= sin(grid, 9, 15, -2.507870105026106, -89.43842740853354) grid = transit(grid, 0,", "11, 8, 12) grid = transit(grid, 1, [1, 14, 8],", "0.003439701966452383, 0.10819642722960272, 0.15371289739415475, 0.25812192912399506, 0.005727171643985687, 0.14633649245899077, 0.033890406689391105, 0.05550396325806974, 0.1242259093715456]) grid", "8], [0.7777441717493406, 0.22225582825065934]) grid = transit(grid, 3, [9], [1.0]) grid", "+ y * 0.05113255120007798) / 2 grid[:,:,8] = (x *", "9, 2, 0, 1, 5], [0.18796556626817826, 0.19260744772691155, 0.11226112831146452, 0.08161640805634696, 0.08706050582840198,", "[9], [1.0]) grid = transit(grid, 11, [2], [1.0]) #create color", "sin(grid, 14, 8, -0.4693746108213766, -98.17810769380118) grid = sin(grid, 12, 10,", "9, 0, 8, 15, 2, 10, 14], [0.20381942291270427, 0.07753380798970702, 0.11445683149439734,", "0.29532957711092916, 0.2839918932530799]) grid = sin(grid, 4, 3, 2.634465399239887, 
62.07538440217337) grid", "grid = transit(grid, 11, [0], [1.0]) grid = magnitude(grid, 0,", "sin(grid, 14, 14, -1.842523240371888, 74.23947694195837) grid = inverse(grid, 7, 8)", "= transit(grid, 4, [1, 12, 15, 13, 3], [0.32356965941479515, 0.022696478437764827,", "shift)) + 1) * 2 - 1 if shift <", "= magnitude(grid, 1, [7], 2) grid = smooth_min(grid, 7, 4,", "+ 1) * 2 - 1 if shift[i] < 0:", "transit(grid, 5, [1, 9, 3, 10, 4], [0.24075568684771534, 0.02527375632067568, 0.4828116495090197,", "0.15226309381221942]) grid = magnitude(grid, 10, [7, 15, 5], 2) grid", "test_values(res) def magnitude(x, t_indx, s_indx, ord = 2): res =", "4, 14, 3, 5], [0.11084510086381213, 0.003439701966452383, 0.10819642722960272, 0.15371289739415475, 0.25812192912399506, 0.005727171643985687,", "11, [7, 2, 3, 9, 5], [0.24039798004748805, 0.2886075990223525, 0.18742374307846998, 0.11615833154358073,", "0.19750663742298355, 0.16072124228620793, 0.15869932715876592, 0.14757838472737334]) grid = transit(grid, 2, [1, 7],", "12, 4, 7, 15], 2) grid = sin(grid, 12, 7,", "2) ** (1 + shift) - 1) ** (1 /", "Exception('Values went to far! 
[ %.2f : %.2f ]'%(np.amin(arr), np.amax(arr))", "= x.copy() res[:,:,t_indx] = -x[:,:,s_indx] return test_values(res) def smooth_max(x, t_indx,", "3.0281102269529683, 11.185401112275173) grid = sin(grid, 10, 4, 1.2844464834351186, -45.836492724169695) grid", "= transit(grid, 2, [11, 7, 13], [0.3629247592109436, 0.10073172896374764, 0.5363435118253088]) grid", "[11, 15, 0], [0.08195235243098883, 0.6796005904358621, 0.23844705713314918]) grid = power(grid, 14,", "= transit(grid, 13, [10, 8, 9, 12, 2], [0.031587088727564654, 0.024264739611302585,", "-1.6398586072056767, 84.51374680259704) grid = sin(grid, 1, 1, -0.183401440709518, -88.40242580975152) grid", "15, 10], [0.13237609957996088, 0.22944646977966682, 0.6381774306403722]) grid = transit(grid, 6, [15],", "= smooth_max(grid, 10, 3, 15) grid = magnitude(grid, 9, [15,", "0.7827958631857042, -90.82177259964699) grid = transit(grid, 6, [8, 6, 5, 7,", "0.10141059844877331, 0.2728285912351676, 0.036877533709020166]) grid = transit(grid, 7, [11], [1.0]) grid", "* 0.5 * np.pi * scale + shift) return test_values(res)", "grid = prod(grid, 8, [11, 7, 4, 12]) grid =", "-0.1377650382373763, -96.34412250071645) grid = sin(grid, 7, 3, 1.6405444007982959, -37.09230830685477) grid", "15, 13], [0.1869735689344564, 0.06343641920215143, 0.038951322931441136, 0.04613309733662021, 0.19750663742298355, 0.16072124228620793, 0.15869932715876592, 0.14757838472737334])", "0.011062110917544764, 0.017334502896306194, 0.1522407828502505]) grid = prod(grid, 2, [8, 7, 11,", "-2.5319898824657017, -45.01904701883333) grid = shift(grid, 5, 5, 3.1584260780059252) grid =", "0.8090860808441245 + y * 0.2914526739617249) / 2 grid[:,:,2] = (x", "0.1755951969700656, 0.043989590834687294, 0.22866693087969006, 0.1812166509589377]) grid = sin(grid, 4, 2, -3.329894296119046,", "6) grid = prod(grid, 3, [2, 6, 10, 7, 4])", "2, [1, 7], [0.18247956114317448, 0.8175204388568255]) grid = transit(grid, 8, [11,", "7, 1.6265187305620117, -97.13150019385894) grid = 
transit(grid, 11, [0, 9], [0.1290607634325389,", "np.sum(x[:,:,s_indx] * alphas, axis = -1) return test_values(res.clip(-1,1)) def sin(x,", "10, 0, 7.741409383532979, -12.082110529508299) grid = prod(grid, 11, [9]) grid", "0.0847896752697893, 0.0921589665387646, 0.008240731277180186, 0.17158558178481512]) grid = transit(grid, 5, [11, 10],", "grid = smooth_max(grid, 10, 3, 15) grid = magnitude(grid, 9,", "0, 12, 2.5526439221510495) grid = sin(grid, 4, 10, -3.680544885171134, 30.633332441673872)", "4], [0.1713900685471786, 0.14082681623065177, 0.19859698568682838, 0.4891861295353413]) grid = transit(grid, 13, [12,", "-4.261918262131112, 18.680580924548693) grid = smooth_max(grid, 2, 2, 11) grid =", "(1 - shift[i]) - 1) ** (1 / (1 -", "grid = transit(grid, 5, [11, 10], [0.9817011300708863, 0.018298869929113594]) grid =", "4, 6, 12], [0.18067242214638962, 0.12939497982917472, 0.08164480089591167, 0.24583958083442445, 0.2244518823086713, 0.13799633398542827]) grid", "magnitude(grid, 6, [14, 5, 13, 11, 2, 9], 2) grid", "0.07753380798970702, 0.11445683149439734, 0.08475226158626031, 0.1416941580568898, 0.020968563089492034, 0.0847896752697893, 0.0921589665387646, 0.008240731277180186, 0.17158558178481512]) grid", "= np.abs((1 - (x [:,:,i]+ 1) / 2) ** (1", "14, [4], 2) grid = sin(grid, 1, 5, 8.18216846853571, -6.729427492311089)", "0.07628940657007711]) grid = transit(grid, 3, [11, 1, 12, 9, 0,", "63.37681521624819) grid = smooth_min(grid, 11, 10, 9) grid = sin(grid,", "0.050730322437922]) grid = prod(grid, 1, [12, 13]) grid = sin(grid,", "grid = inverse(grid, 8, 5) grid = smooth_max(grid, 10, 5,", "8], [0.03500911832175082, 0.03265868671024263, 0.3248025339288217, 0.4234363710484886, 0.13338109758306646, 0.050712192407629864]) grid = transit(grid,", "14], [0.05801706264076675, 0.341923243761946, 0.0494872820880747, 0.29583940098242745, 0.2547330105267852]) grid = inverse(grid, 11,", "[14, 3, 11, 10, 7], [0.5203714128788618, 0.068511863728177, 0.10141059844877331, 
0.2728285912351676, 0.036877533709020166])", "30.633332441673872) grid = transit(grid, 11, [12, 6, 9], [0.1597221050818672, 0.523275926379751,", "grid = shift(grid, 9, 8, -1.1449289879251126) grid = transit(grid, 7,", "8, 9, 2.766857264282361) grid = transit(grid, 3, [6, 14, 0,", "5) grid = smooth_max(grid, 10, 5, 13) grid = sin(grid,", "[15], [1.0]) grid = prod(grid, 13, [6, 3, 7]) grid", "- 1 return test_values(res) res = np.zeros((SIZE, SIZE, 3)) res", "1) * 2 - 1 if shift < 0: res[:,:,t_indx]", "0.1636179110868493, 0.03797238284324163, 0.29532957711092916, 0.2839918932530799]) grid = sin(grid, 4, 3, 2.634465399239887,", "13) grid = sin(grid, 9, 10, -1.8565532127479274, -54.75186223635349) grid =", "2]) grid = transit(grid, 15, [0, 3], [0.29345909580747953, 0.7065409041925205]) grid", "[0.036901331671075975, 0.5054281720479712, 0.13288430351514774, 0.10820806749406277, 0.21657812527174225]) grid = transit(grid, 3, [7,", "-1) return test_values(res) def power(x, t_indx, s_indx, p = 1):", "grid = transit(grid, 10, [1], [1.0]) grid = transit(grid, 15,", "* 255).clip(0,255) #save results im = Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) + '.png')", "8, [14], [1.0]) grid = transit(grid, 4, [1, 12, 15,", "shift(grid, 5, 5, 3.1584260780059252) grid = transit(grid, 10, [9, 8],", "grid = transit(grid, 1, [3], [1.0]) grid = magnitude(grid, 14,", "7, 1.439019575760617, 13.126437741104823) grid = transit(grid, 10, [15, 8, 13,", "[7, 4, 15], 2) grid = transit(grid, 13, [6, 15,", "= magnitude(grid, 13, [8], 2) grid = transit(grid, 13, [15,", "0.25224450337702853]) grid = sin(grid, 11, 8, 4.303514875116891, -67.11152580467314) grid =", "sin(grid, 11, 6, 1.576100090732909, -21.508000199215132) grid = shift(grid, 11, 5,", "0.16179927966914437, 0.30433226546908315]) grid = magnitude(grid, 6, [14, 5, 13, 11,", "np #Numpy version: 1.19.5 from PIL import Image #PIL version:", "[0.9172074355564371, 0.08279256444356292]) grid = transit(grid, 13, [1, 2, 7, 5,", "1, 
[1, 14, 8], [0.38986786543390084, 0.40057743619803005, 0.20955469836806906]) grid = transit(grid,", "+ y * 0.4365452266748293) / 2 grid[:,:,7] = (x *", "p)) ** (1/p)) / 1.07 return test_values(res) def smooth_min(x, t_indx,", "9, 9, 10) grid = shift(grid, 8, 1, -0.2952350240798842) grid", "2, 11) grid = sin(grid, 13, 13, 7.718114740496995, 55.242200715207815) grid", "2 grid[:,:,15] = (x * 0.49037959172682255 + y * -0.7671554143072785)", "= transit(grid, 4, [8, 4, 15, 9, 10], [0.10267794314653868, 0.019022820046952493,", "= sin(grid, 7, 3, 1.6405444007982959, -37.09230830685477) grid = transit(grid, 9,", "[0.29712982335534416, 0.2526657169525107, 0.08415696601637544, 0.18541009701166816, 0.011062110917544764, 0.017334502896306194, 0.1522407828502505]) grid = prod(grid,", "= x.copy() res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx] * p) + np.exp(-x[:,:,s2_indx] *", "= magnitude(grid, 7, [6, 12, 7, 13, 8], 2) grid", "-96.34412250071645) grid = sin(grid, 7, 3, 1.6405444007982959, -37.09230830685477) grid =", "5], [0.11084510086381213, 0.003439701966452383, 0.10819642722960272, 0.15371289739415475, 0.25812192912399506, 0.005727171643985687, 0.14633649245899077, 0.033890406689391105, 0.05550396325806974,", "0.036877533709020166]) grid = transit(grid, 7, [11], [1.0]) grid = transit(grid,", "smooth_max(grid, 1, 0, 1) grid = sin(grid, 9, 4, 3.0281102269529683,", "= transit(grid, 3, [15, 11, 2, 8, 0], [0.28772794692354614, 0.1935939805514465,", "shift(grid, 13, 3, 5.677279514103952) grid = transit(grid, 3, [15, 11,", "0.3467981515651057, 0.262107802795733, 0.038001653167336905, 0.2112967596903696, 0.002128256606899112, 0.08103579316557531]) grid = shift(grid, 3,", "-0.8484277738516293 + y * -0.5155435342135386) / 2 grid[:,:,4] = (x", "[0.05801706264076675, 0.341923243761946, 0.0494872820880747, 0.29583940098242745, 0.2547330105267852]) grid = inverse(grid, 11, 5)", "2, 2.55681173849493) grid = sin(grid, 10, 14, 0.8649185298731181, 3.1973516320924773) grid", "6, 11, 14], 
[0.10006330804326793, 0.03891760159161208, 0.005474465860804227, 0.12962618248625338, 0.03090992138168193, 0.016043163973997736, 0.13259375374543056,", "grid = magnitude(grid, 9, [15, 7], 2) grid = transit(grid,", "8, 15, 2, 10, 14], [0.20381942291270427, 0.07753380798970702, 0.11445683149439734, 0.08475226158626031, 0.1416941580568898,", "18.680580924548693) grid = smooth_max(grid, 2, 2, 11) grid = sin(grid,", "[1, 2], [0.9078557995211777, 0.09214420047882232]) grid = smooth_max(grid, 1, 0, 1)", "9], 2) grid = sin(grid, 9, 5, -5.606152225672729, -35.928477282758536) grid", "= inverse(grid, 4, 5) grid = transit(grid, 1, [4, 14,", "0.08161640805634696, 0.08706050582840198, 0.2243337708440404, 0.11415517296465624]) grid = sin(grid, 11, 13, -6.909579361872105,", "< -1 or np.amax(arr) > 1: raise Exception('Values went to", "0.6381774306403722]) grid = transit(grid, 6, [15], [1.0]) grid = sin(grid,", "2, [9, 11, 10], [0.2662646690994658, 0.2460545507972383, 0.4876807801032959]) grid = transit(grid,", "y*SIZE:(y+1)*SIZE] = grid[:,:,j] img = (img + 1) * 127.5", "-53.62090724330151) grid = sin(grid, 10, 10, 0.7827958631857042, -90.82177259964699) grid =", "11, [12, 6, 9], [0.1597221050818672, 0.523275926379751, 0.31700196853838186]) grid = sin(grid,", "15) grid = smooth_max(grid, 5, 8, 4) grid = transit(grid,", "= (x * 0.49037959172682255 + y * -0.7671554143072785) / 2", "1) ** (1 / (1 + shift)) + 1) *", "grid = transit(grid, 13, [6, 15, 11, 9, 12], [0.21908823570589997,", "12, 6.470760426148978, -53.62090724330151) grid = sin(grid, 10, 10, 0.7827958631857042, -90.82177259964699)", "= sin(grid, 4, 3, 2.634465399239887, 62.07538440217337) grid = sin(grid, 7,", "= sin(grid, 11, 8, 4.303514875116891, -67.11152580467314) grid = prod(grid, 5,", "7, 5.409920766787869, -58.09956716630187) grid = sin(grid, 2, 15, -2.5319898824657017, -45.01904701883333)", "15, 0, 6], [0.24973877983541862, 0.3378766591098989, 0.15974656746239488, 0.027776085211312595, 0.02330072841260748, 
0.20156117996836745]) grid", "11, 0, 15], [0.036901331671075975, 0.5054281720479712, 0.13288430351514774, 0.10820806749406277, 0.21657812527174225]) grid =", "#apply transformations to the grid grid = transit(grid, 4, [7,", "4, 2, 11, 13], [0.03597236183123865, 0.04938629068404894, 0.08457069101219464, 0.014801187461296406, 0.3649334871683411, 0.28062233683539095,", "15, 4, 1, 0, 14], [0.29712982335534416, 0.2526657169525107, 0.08415696601637544, 0.18541009701166816, 0.011062110917544764,", "= smooth_max(grid, 1, 0, 1) grid = sin(grid, 9, 4,", "0.4030414045204916, 0.07629394446370606]) grid = magnitude(grid, 13, [7, 4, 15], 2)", "2, [12], [1.0]) grid = prod(grid, 14, [11, 10]) grid", "res = np.zeros((SIZE, SIZE, 3)) res += shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669,", "9, 11) grid = sin(grid, 4, 15, -1.9527829039221054, 20.537776250912316) grid", "grid = sin(grid, 7, 2, 3.41043792019894, 65.36615977552518) grid = transit(grid,", "0.36113653779766947]) grid = transit(grid, 14, [10, 14, 4, 9, 13,", "0.02330072841260748, 0.20156117996836745]) grid = smooth_min(grid, 0, 5, 1) grid =", "grid = transit(grid, 15, [15], [1.0]) grid = prod(grid, 13,", "11, [9, 0, 11, 7, 3, 8], [0.03500911832175082, 0.03265868671024263, 0.3248025339288217,", "transformation methods def transit(x, t_indx, s_indx, alphas): res = x.copy()", "11, 2, 13], [0.381505247910628, 0.12073241493361198, 0.3454992433435407, 0.15226309381221942]) grid = magnitude(grid,", "sin(grid, 6, 11, -0.7697482296056479, 23.55348445076298) grid = sin(grid, 7, 7,", "1, 11) grid = transit(grid, 5, [11, 4, 2, 1,", "transit(grid, 1, [14], [1.0]) grid = transit(grid, 8, [9, 10,", "np.zeros((SIZE, SIZE, GRID_CHANNELS)) x = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((1,", "0.0072484377164178625, 0.4477791048998878, 0.11849249751317383]) grid = transit(grid, 10, [5, 11, 15,", "11], [0.036102265915692405, 0.1224495166624379, 0.2384660328868578, 0.3357862916746864, 0.2671958928603256]) grid = 
smooth_min(grid, 1,", "2.5947698108630664, -90.74050288622541) grid = sin(grid, 9, 8, -0.8743741598911887, 15.92872484723533) grid", "0.1242259093715456]) grid = smooth_max(grid, 10, 15, 10) grid = transit(grid,", "- shift[i]) - 1) ** (1 / (1 - shift[i]))", "= smooth_min(grid, 12, 9, 11) grid = sin(grid, 4, 15,", "7, -0.3409112713023047, 75.93313567333723) grid = transit(grid, 11, [5, 10, 7],", "11, 3, 8, 7], [0.207462236904601, 0.11516125867317799, 0.12240760599022518, 0.05066197369764289, 0.13869178538077429, 0.09948828746526778,", "** (1 / (1 - shift[i])) * 2 - 1", "0.2384660328868578, 0.3357862916746864, 0.2671958928603256]) grid = smooth_min(grid, 1, 1, 11) grid", "11, 10, 15, 0, 5]) grid = transit(grid, 11, [7,", "0.6796005904358621, 0.23844705713314918]) grid = power(grid, 14, 0, 0.10854801586669052) grid =", "grid = transit(grid, 6, [6, 13, 7], [0.16813621041531998, 0.42150135317124293, 0.410362436413437])", "[0.18247956114317448, 0.8175204388568255]) grid = transit(grid, 8, [11, 15, 0], [0.08195235243098883,", "= smooth_min(grid, 13, 10, 15) grid = transit(grid, 1, [12,", "if shift > 0: res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx] + 1) /", "grid = shift(grid, 5, 5, 3.1584260780059252) grid = transit(grid, 10,", "-2.4657577404884132, 72.95418196004374) grid = transit(grid, 12, [7, 4, 10, 5],", "grid = prod(grid, 1, [12, 13]) grid = sin(grid, 6,", "4, 1, 8) grid = sin(grid, 4, 4, 3.47544933993972, -37.11795195118333)", "11, [1, 15, 5, 0, 6, 12, 2, 7, 4],", "12, 10, 3.6427863324838423, 99.297524709649) grid = sin(grid, 5, 14, -1.45141083652418,", "im = Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) + '.png') #save layers img =", "= transit(grid, 2, [1, 7], [0.18247956114317448, 0.8175204388568255]) grid = transit(grid,", "7], [0.16813621041531998, 0.42150135317124293, 0.410362436413437]) grid = inverse(grid, 6, 6) grid", "grid = transit(grid, 8, [14], [1.0]) grid = transit(grid, 4,", "def inverse(x, t_indx, s_indx): res = x.copy() 
res[:,:,t_indx] = -x[:,:,s_indx]", "0.13045782410775286, 0.02917564277599849, 0.12489006625007311, 0.13398690135296518]) grid = transit(grid, 2, [2, 0,", "= transit(grid, 11, [9, 11], [0.37033495928182997, 0.6296650407181701]) grid = smooth_min(grid,", "= sin(grid, 8, 10, 2.5947698108630664, -90.74050288622541) grid = sin(grid, 9,", "s_indx, ord = 2): res = x.copy() res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx],", "15, 5, 7], [0.06492287400539203, 0.21223490901058306, 0.36311130408652753, 0.09994467226348329, 0.12833432959710458, 0.1314519110369097]) grid", "11, 5], [0.421270391024163, 0.5054038923567993, 0.07332571661903758]) grid = transit(grid, 11, [1,", "3, 11], [0.13835365002720226, 0.008781149737259792, 0.24627334258742545, 0.04870190081124998, 0.049950480577274, 0.15123046752435387, 0.31255198044446264, 0.04415702829077187])", "inverse(grid, 15, 10) grid = shift(grid, 6, 1, -1.115193397983063) grid", "= prod(grid, 2, [8, 7, 11, 10, 15, 0, 5])", "[3], [1.0]) grid = inverse(grid, 8, 5) grid = smooth_max(grid,", "grid = sin(grid, 9, 6, 1.6821417847846682, -64.12547446801875) grid = sin(grid,", "[1.0]) grid = transit(grid, 1, [8, 10, 15, 14, 9],", "6, 5, 0.9223892145169746) grid = transit(grid, 2, [9, 11, 10],", "[15, 6, 2, 7], [0.45073658968521574, 0.16060948991238613, 0.12949271785123345, 0.2591612025511646]) grid =", "0.14633649245899077, 0.033890406689391105, 0.05550396325806974, 0.1242259093715456]) grid = smooth_max(grid, 10, 15, 10)", "5, 1.0526879494498724) grid = transit(grid, 1, [14], [1.0]) grid =", "7, 4, 12]) grid = transit(grid, 7, [15, 6, 2,", "test_values(res) def prod(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx] =", "y * -0.3867357840809138) / 2 grid[:,:,15] = (x * 0.49037959172682255", "2, 7, 5, 8, 9, 15], [0.085742434722219, 0.4119764535375412, 0.08377067725345017, 0.13045782410775286,", "shift[i])) + 1) * 2 - 1 if shift[i] <", "1, 0) grid = smooth_max(grid, 1, 15, 12) grid =", "grid = transit(grid, 8, [1], [1.0]) grid = sin(grid, 4,", 
"15, -2.5319898824657017, -45.01904701883333) grid = shift(grid, 5, 5, 3.1584260780059252) grid", "= (x * 0.9386329219527516 + y * -0.45147169454413794) / 2", "5, 8, 9, 15], [0.085742434722219, 0.4119764535375412, 0.08377067725345017, 0.13045782410775286, 0.02917564277599849, 0.12489006625007311,", "+ y * 0.9515468928881716) / 2 grid[:,:,6] = (x *", "s_indx): res = x.copy() res[:,:,t_indx] = np.prod(x[:,:,s_indx], -1) return test_values(res)", "- 1 if shift < 0: res[:,:,t_indx] = np.abs((1 -", "y * -0.6817079327248272) / 2 grid[:,:,11] = (x * 0.8435706697714382", "4 img[x*SIZE:(x + 1)*SIZE, y*SIZE:(y+1)*SIZE] = grid[:,:,j] img = (img", "grid = transit(grid, 2, [0, 15, 10], [0.005204838856346087, 0.5116602651328436, 0.48313489601081044])", "0.41561584065110807]) grid = transit(grid, 2, [0, 4, 2], [0.010597803396528332, 0.7371576932264431,", "7], [0.45073658968521574, 0.16060948991238613, 0.12949271785123345, 0.2591612025511646]) grid = transit(grid, 10, [11,", "0.03265868671024263, 0.3248025339288217, 0.4234363710484886, 0.13338109758306646, 0.050712192407629864]) grid = transit(grid, 7, [14,", "transit(grid, 6, [6, 13, 7], [0.16813621041531998, 0.42150135317124293, 0.410362436413437]) grid =", "res = x.copy() res[:,:,t_indx] = np.sum(x[:,:,s_indx] * alphas, axis =", "0) grid = smooth_max(grid, 1, 15, 12) grid = prod(grid,", "magnitude(grid, 1, [7], 2) grid = smooth_min(grid, 7, 4, 13)", "= transit(grid, 10, [15, 8, 13, 2], [0.32464063956303774, 0.20922781529873477, 0.16179927966914437,", "= sin(grid, 9, 8, -0.8743741598911887, 15.92872484723533) grid = transit(grid, 4,", "0.13714679001436697]) grid = transit(grid, 4, [14, 11, 12, 13, 4,", "bit (AMD64)] # For more information visit: https://github.com/volotat/GAS #import python", "3, 8) grid = transit(grid, 13, [13, 0, 5, 14],", "= sin(grid, 10, 9, 6.219381309190064, -71.03631884776823) grid = sin(grid, 9,", "4, 0], [0.2070905138265326, 0.06562120796792839, 0.17355051228662716, 0.05514926535269553, 
0.0829726599151083, 0.41561584065110807]) grid =", "[0.1869735689344564, 0.06343641920215143, 0.038951322931441136, 0.04613309733662021, 0.19750663742298355, 0.16072124228620793, 0.15869932715876592, 0.14757838472737334]) grid =", "grid = sin(grid, 14, 5, 0.053526366336325744, 4.147364704932215) grid = transit(grid,", "p)) ** (1/p)) / 1.07 return test_values(res) def prod(x, t_indx,", "grid = transit(grid, 3, [11, 1, 12, 9, 0, 8,", "2], [0.010597803396528332, 0.7371576932264431, 0.25224450337702853]) grid = sin(grid, 11, 8, 4.303514875116891,", "[ %.2f : %.2f ]'%(np.amin(arr), np.amax(arr)) ) return arr #define", "sin(grid, 12, 10, 3.6427863324838423, 99.297524709649) grid = sin(grid, 5, 14,", "3], [0.29345909580747953, 0.7065409041925205]) grid = sin(grid, 12, 4, -1.6398586072056767, 84.51374680259704)", "sin(grid, 5, 2, -2.2972705471452146, -12.522748365129786) grid = smooth_min(grid, 12, 9,", "1, [14], [1.0]) grid = transit(grid, 8, [9, 10, 2,", "x.copy() res[:,:,t_indx] = np.sin(x[:,:,s_indx] * 0.5 * np.pi * scale", "grid = shift(grid, 6, 1, -1.115193397983063) grid = smooth_max(grid, 13,", "alphas): res = x.copy() res[:,:,t_indx] = np.sum(x[:,:,s_indx] * alphas, axis", "grid[:,:,13] = (x * -0.5864100240508576 + y * -0.9425245660964123) /", "2, 11, 14], 2) grid = transit(grid, 12, [8, 11,", "0.3417883406507845]) grid = transit(grid, 15, [7, 3], [0.9172074355564371, 0.08279256444356292]) grid", "grid = magnitude(grid, 13, [11, 7], 2) grid = sin(grid,", "= power(grid, 10, 5, 0.12539493928522222) grid = power(grid, 0, 12,", "= sin(grid, 8, 2, 3.501615294498545, -75.50049353340206) grid = prod(grid, 9,", "x.copy() res[:,:,t_indx] = -x[:,:,s_indx] return test_values(res) def smooth_max(x, t_indx, s1_indx,", "0.03090992138168193, 0.016043163973997736, 0.13259375374543056, 0.09920705802758992, 0.1415090600653345, 0.09597789664069131, 0.06106766497801195, 0.14032187015082653, 0.008288053054498123]) grid", "Generation date: 2021-11-28 09:21:40 UTC # GAS change date: 
2021-11-28", "version: 8.1.2 #set initial params SIZE = 768 GRID_CHANNELS =", "76.37702042567852) grid = magnitude(grid, 15, [5, 3, 8, 0, 15],", "shift[i] < 0: res[:,:,i] = np.abs((1 - (x [:,:,i]+ 1)", "test_values(arr): if np.isnan(arr).any(): raise Exception('Array has None elements!') if np.amin(arr)", "smooth_max(grid, 8, 10, 6) grid = prod(grid, 3, [2, 6,", "shift): res = x.copy() if shift > 0: res[:,:,t_indx] =", "/ 1.07 return test_values(res) def smooth_min(x, t_indx, s1_indx, s2_indx, p", "13.126437741104823) grid = transit(grid, 10, [15, 8, 13, 2], [0.32464063956303774,", "1) ** (1 / (1 + shift[i])) + 1) *", "0.11615833154358073, 0.16741234630810867]) grid = prod(grid, 0, [0, 1, 2, 14])", "0.033890406689391105, 0.05550396325806974, 0.1242259093715456]) grid = smooth_max(grid, 10, 15, 10) grid", "4, [3], [1.0]) grid = sin(grid, 3, 12, -4.078686662791614, 24.459526349523884)", "-0.4693746108213766, -98.17810769380118) grid = sin(grid, 12, 10, 3.6427863324838423, 99.297524709649) grid", "11, [9]) grid = sin(grid, 4, 3, 0.10154488887533689, 12.479110491961137) grid", "0.0904752958055755, 0.11683555248582808, 0.30211530092641004]) grid = sin(grid, 5, 2, -2.2972705471452146, -12.522748365129786)", "13, 5, 0, 7, 8, 9, 12, 6, 11, 14],", "for i in range(x.shape[-1]): if shift[i] > 0: res[:,:,i] =", "9, 15, -2.507870105026106, -89.43842740853354) grid = transit(grid, 0, [12, 6,", "51.94880270063618) grid = smooth_min(grid, 13, 10, 15) grid = transit(grid,", "0.03622483092076182, 0.09070212266434277, 0.4030414045204916, 0.07629394446370606]) grid = magnitude(grid, 13, [7, 4,", "13, [5, 15, 10], [0.13237609957996088, 0.22944646977966682, 0.6381774306403722]) grid = transit(grid,", "inverse(grid, 0, 0) grid = magnitude(grid, 13, [8], 2) grid", "= transit(grid, 13, [11, 0], [0.6569516962992897, 0.3430483037007103]) grid = sin(grid,", "shift(grid, 8, 9, 2.766857264282361) grid = transit(grid, 3, [6, 14,", "10, 9) grid = sin(grid, 13, 2, 4.295107938126156, 
57.378601701270014) grid", "0.11556721863292163, 0.12372657123165616, 0.1356897031789931, 0.20047556686480725, 0.09921434949484752, 0.05399039482501285]) grid = transit(grid, 9,", "t_indx, s_indx, ord = 2): res = x.copy() res[:,:,t_indx] =", "4, 3, 0.10154488887533689, 12.479110491961137) grid = magnitude(grid, 1, [7], 2)", "7, 7, 0.5492744322205282, 35.873568370773654) grid = transit(grid, 7, [13], [1.0])", "6, 6) grid = sin(grid, 7, 15, -4.9164570678736865, 86.15931416043557) grid", "[0.9078557995211777, 0.09214420047882232]) grid = smooth_max(grid, 1, 0, 1) grid =", "grid = sin(grid, 10, 3, -2.5681840787633137, -30.256455817944243) grid = sin(grid,", "[0.09662806703796267, 0.1621478194912538, 0.21548762580464817, 0.5257364876661353]) grid = inverse(grid, 1, 0) grid", "1) grid = sin(grid, 9, 4, 3.0281102269529683, 11.185401112275173) grid =", "x.copy() res[:,:,t_indx] = np.sum(x[:,:,s_indx] * alphas, axis = -1) return", "1, 0, 14], [0.29712982335534416, 0.2526657169525107, 0.08415696601637544, 0.18541009701166816, 0.011062110917544764, 0.017334502896306194, 0.1522407828502505])", "+ y * -0.3867357840809138) / 2 grid[:,:,15] = (x *", "0.11303295854369695, 0.13714679001436697]) grid = transit(grid, 4, [14, 11, 12, 13,", "grid = sin(grid, 14, 8, -0.4693746108213766, -98.17810769380118) grid = sin(grid,", "= transit(grid, 7, [15, 6, 2, 7], [0.45073658968521574, 0.16060948991238613, 0.12949271785123345,", "-54.75186223635349) grid = transit(grid, 10, [14], [1.0]) grid = transit(grid,", "grid = sin(grid, 12, 14, 1.097917736937588, 58.87772371184383) grid = transit(grid,", "[11, 7, 4, 12]) grid = transit(grid, 7, [15, 6,", "-0.8743741598911887, 15.92872484723533) grid = transit(grid, 4, [3, 13, 9, 8,", "0.5054281720479712, 0.13288430351514774, 0.10820806749406277, 0.21657812527174225]) grid = transit(grid, 3, [7, 3,", "shift(grid, 8, 1, -0.2952350240798842) grid = sin(grid, 11, 6, 1.576100090732909,", "+ 1)*SIZE, y*SIZE:(y+1)*SIZE] = grid[:,:,j] img = (img + 1)", "grid 
= sin(grid, 4, 3, 2.634465399239887, 62.07538440217337) grid = sin(grid,", "[8, 6, 5, 7, 4, 2], [0.39579476392315127, 0.3200094081197146, 0.06439062651950353, 0.03284446726347166,", "- 1) ** (1 / (1 + shift)) + 1)", "[5, 3, 8, 0, 15], 2) grid = prod(grid, 2," ]
[ "training: ``` <tok_0>\\t<count_0> <tok_1>\\t<count_1> ``` \"\"\" ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True) assert opts.n_sample", "init_logger() set_random_seed(opts.seed, False) transforms_cls = get_transforms_cls(opts._all_transform) fields = None transforms", "tok, count in counter.most_common(): fo.write(tok + \"\\t\" + str(count) +", "opts.src_feats_vocab[k]) def _get_parser(): parser = ArgumentParser(description='build_vocab.py') dynamic_prepare_opts(parser, build_vocab_only=True) return parser", "+= tgt_counter tgt_counter = src_counter logger.info(f\"Counters after share:{len(src_counter)}\") save_counter(src_counter, opts.src_vocab)", "as `-src_vocab` (and `-tgt_vocab`) when training: ``` <tok_0>\\t<count_0> <tok_1>\\t<count_1> ```", "(and `-tgt_vocab`) when training: ``` <tok_0>\\t<count_0> <tok_1>\\t<count_1> ``` \"\"\" ArgumentParser.validate_prepare_opts(opts,", "<tok_0>\\t<count_0> <tok_1>\\t<count_1> ``` \"\"\" ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True) assert opts.n_sample == -1", "as fo: for tok, count in counter.most_common(): fo.write(tok + \"\\t\"", "onmt.transforms import make_transforms, get_transforms_cls def build_vocab_main(opts): \"\"\"Apply transforms to samples", "data and build vocab from it. 
Transforms that need vocab", "open(save_path, \"w\", encoding=\"utf8\") as fo: for tok, count in counter.most_common():", "\"\\t\" + str(count) + \"\\n\") if opts.share_vocab: src_counter += tgt_counter", "vocab from {opts.n_sample} samples.\") src_counter, tgt_counter, src_feats_counter = build_vocab( opts,", "save_counter(src_counter, opts.src_vocab) save_counter(tgt_counter, opts.tgt_vocab) for k, v in src_feats_counter.items(): save_counter(v,", "if opts.share_vocab: src_counter += tgt_counter tgt_counter = src_counter logger.info(f\"Counters after", "tgt_counter tgt_counter = src_counter logger.info(f\"Counters after share:{len(src_counter)}\") save_counter(src_counter, opts.src_vocab) else:", "Built vocab is saved in plain text format as following", "v in src_feats_counter.items(): save_counter(v, opts.src_feats_vocab[k]) def _get_parser(): parser = ArgumentParser(description='build_vocab.py')", "need vocab will be disabled in this. Built vocab is", "k, v in src_feats_counter.items(): save_counter(v, opts.src_feats_vocab[k]) def _get_parser(): parser =", "check_path from onmt.utils.parse import ArgumentParser from onmt.opts import dynamic_prepare_opts from", "vocab is saved in plain text format as following and", "n_sample={opts.n_sample}.\" logger = init_logger() set_random_seed(opts.seed, False) transforms_cls = get_transforms_cls(opts._all_transform) fields", "ArgumentParser(description='build_vocab.py') dynamic_prepare_opts(parser, build_vocab_only=True) return parser def main(): parser = _get_parser()", "import ArgumentParser from onmt.opts import dynamic_prepare_opts from onmt.inputters.corpus import build_vocab", "from onmt.transforms import make_transforms, get_transforms_cls def build_vocab_main(opts): \"\"\"Apply transforms to", "transforms to samples of specified data and build vocab from", "opts.src_vocab) save_counter(tgt_counter, opts.tgt_vocab) for k, v in src_feats_counter.items(): save_counter(v, opts.src_feats_vocab[k])", "following and can 
be pass as `-src_vocab` (and `-tgt_vocab`) when", "transforms_cls = get_transforms_cls(opts._all_transform) fields = None transforms = make_transforms(opts, transforms_cls,", "save_counter(v, opts.src_feats_vocab[k]) def _get_parser(): parser = ArgumentParser(description='build_vocab.py') dynamic_prepare_opts(parser, build_vocab_only=True) return", "\"\"\" ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True) assert opts.n_sample == -1 or opts.n_sample >", "dynamic_prepare_opts(parser, build_vocab_only=True) return parser def main(): parser = _get_parser() opts,", "coutings from transformed corpora samples.\"\"\" from onmt.utils.logging import init_logger from", "onmt.utils.logging import init_logger from onmt.utils.misc import set_random_seed, check_path from onmt.utils.parse", "samples of specified data and build vocab from it. Transforms", "is saved in plain text format as following and can", "f\"Illegal argument n_sample={opts.n_sample}.\" logger = init_logger() set_random_seed(opts.seed, False) transforms_cls =", "and build vocab from it. 
Transforms that need vocab will", "corpora samples.\"\"\" from onmt.utils.logging import init_logger from onmt.utils.misc import set_random_seed,", "build_vocab( opts, transforms, n_sample=opts.n_sample) logger.info(f\"Counters src:{len(src_counter)}\") logger.info(f\"Counters tgt:{len(tgt_counter)}\") for feat_name,", "assert opts.n_sample == -1 or opts.n_sample > 1, \\ f\"Illegal", "from transformed corpora samples.\"\"\" from onmt.utils.logging import init_logger from onmt.utils.misc", "make_transforms, get_transforms_cls def build_vocab_main(opts): \"\"\"Apply transforms to samples of specified", "in src_feats_counter.items(): logger.info(f\"Counters {feat_name}:{len(feat_counter)}\") def save_counter(counter, save_path): check_path(save_path, exist_ok=opts.overwrite, log=logger.warning)", "{feat_name}:{len(feat_counter)}\") def save_counter(counter, save_path): check_path(save_path, exist_ok=opts.overwrite, log=logger.warning) with open(save_path, \"w\",", "vocab will be disabled in this. Built vocab is saved", "src_feats_counter = build_vocab( opts, transforms, n_sample=opts.n_sample) logger.info(f\"Counters src:{len(src_counter)}\") logger.info(f\"Counters tgt:{len(tgt_counter)}\")", "ArgumentParser from onmt.opts import dynamic_prepare_opts from onmt.inputters.corpus import build_vocab from", "logger.info(f\"Counters src:{len(src_counter)}\") logger.info(f\"Counters tgt:{len(tgt_counter)}\") for feat_name, feat_counter in src_feats_counter.items(): logger.info(f\"Counters", "from it. 
Transforms that need vocab will be disabled in", "src_counter, tgt_counter, src_feats_counter = build_vocab( opts, transforms, n_sample=opts.n_sample) logger.info(f\"Counters src:{len(src_counter)}\")", "for tok, count in counter.most_common(): fo.write(tok + \"\\t\" + str(count)", "for k, v in src_feats_counter.items(): save_counter(v, opts.src_feats_vocab[k]) def _get_parser(): parser", "samples.\") src_counter, tgt_counter, src_feats_counter = build_vocab( opts, transforms, n_sample=opts.n_sample) logger.info(f\"Counters", "set_random_seed, check_path from onmt.utils.parse import ArgumentParser from onmt.opts import dynamic_prepare_opts", "return parser def main(): parser = _get_parser() opts, unknown =", "can be pass as `-src_vocab` (and `-tgt_vocab`) when training: ```", "make_transforms(opts, transforms_cls, fields) logger.info(f\"Counter vocab from {opts.n_sample} samples.\") src_counter, tgt_counter,", "with open(save_path, \"w\", encoding=\"utf8\") as fo: for tok, count in", "to samples of specified data and build vocab from it.", "this. Built vocab is saved in plain text format as", "samples.\"\"\" from onmt.utils.logging import init_logger from onmt.utils.misc import set_random_seed, check_path", "else: save_counter(src_counter, opts.src_vocab) save_counter(tgt_counter, opts.tgt_vocab) for k, v in src_feats_counter.items():", "def _get_parser(): parser = ArgumentParser(description='build_vocab.py') dynamic_prepare_opts(parser, build_vocab_only=True) return parser def", "fo.write(tok + \"\\t\" + str(count) + \"\\n\") if opts.share_vocab: src_counter", "dynamic_prepare_opts from onmt.inputters.corpus import build_vocab from onmt.transforms import make_transforms, get_transforms_cls", "specified data and build vocab from it. 
Transforms that need", "when training: ``` <tok_0>\\t<count_0> <tok_1>\\t<count_1> ``` \"\"\" ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True) assert", "src:{len(src_counter)}\") logger.info(f\"Counters tgt:{len(tgt_counter)}\") for feat_name, feat_counter in src_feats_counter.items(): logger.info(f\"Counters {feat_name}:{len(feat_counter)}\")", "= ArgumentParser(description='build_vocab.py') dynamic_prepare_opts(parser, build_vocab_only=True) return parser def main(): parser =", "_get_parser() opts, unknown = parser.parse_known_args() build_vocab_main(opts) if __name__ == '__main__':", "or opts.n_sample > 1, \\ f\"Illegal argument n_sample={opts.n_sample}.\" logger =", "import dynamic_prepare_opts from onmt.inputters.corpus import build_vocab from onmt.transforms import make_transforms,", "check_path(save_path, exist_ok=opts.overwrite, log=logger.warning) with open(save_path, \"w\", encoding=\"utf8\") as fo: for", "from {opts.n_sample} samples.\") src_counter, tgt_counter, src_feats_counter = build_vocab( opts, transforms,", "\"\"\"Apply transforms to samples of specified data and build vocab", "+ \"\\t\" + str(count) + \"\\n\") if opts.share_vocab: src_counter +=", "text format as following and can be pass as `-src_vocab`", "= init_logger() set_random_seed(opts.seed, False) transforms_cls = get_transforms_cls(opts._all_transform) fields = None", "n_sample=opts.n_sample) logger.info(f\"Counters src:{len(src_counter)}\") logger.info(f\"Counters tgt:{len(tgt_counter)}\") for feat_name, feat_counter in src_feats_counter.items():", "from onmt.opts import dynamic_prepare_opts from onmt.inputters.corpus import build_vocab from onmt.transforms", "onmt.opts import dynamic_prepare_opts from onmt.inputters.corpus import build_vocab from onmt.transforms import", "build vocab from it. 
Transforms that need vocab will be", "transforms = make_transforms(opts, transforms_cls, fields) logger.info(f\"Counter vocab from {opts.n_sample} samples.\")", "logger.info(f\"Counter vocab from {opts.n_sample} samples.\") src_counter, tgt_counter, src_feats_counter = build_vocab(", "save_path): check_path(save_path, exist_ok=opts.overwrite, log=logger.warning) with open(save_path, \"w\", encoding=\"utf8\") as fo:", "vocab from it. Transforms that need vocab will be disabled", "in counter.most_common(): fo.write(tok + \"\\t\" + str(count) + \"\\n\") if", "from onmt.utils.misc import set_random_seed, check_path from onmt.utils.parse import ArgumentParser from", "will be disabled in this. Built vocab is saved in", "of specified data and build vocab from it. Transforms that", "and can be pass as `-src_vocab` (and `-tgt_vocab`) when training:", "from onmt.utils.parse import ArgumentParser from onmt.opts import dynamic_prepare_opts from onmt.inputters.corpus", "``` <tok_0>\\t<count_0> <tok_1>\\t<count_1> ``` \"\"\" ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True) assert opts.n_sample ==", "count in counter.most_common(): fo.write(tok + \"\\t\" + str(count) + \"\\n\")", "encoding=\"utf8\") as fo: for tok, count in counter.most_common(): fo.write(tok +", "share:{len(src_counter)}\") save_counter(src_counter, opts.src_vocab) else: save_counter(src_counter, opts.src_vocab) save_counter(tgt_counter, opts.tgt_vocab) for k,", "feat_counter in src_feats_counter.items(): logger.info(f\"Counters {feat_name}:{len(feat_counter)}\") def save_counter(counter, save_path): check_path(save_path, exist_ok=opts.overwrite,", "tgt:{len(tgt_counter)}\") for feat_name, feat_counter in src_feats_counter.items(): logger.info(f\"Counters {feat_name}:{len(feat_counter)}\") def save_counter(counter,", "in plain text format as following and can be pass", "None transforms = make_transforms(opts, transforms_cls, fields) logger.info(f\"Counter vocab from {opts.n_sample}", 
"src_feats_counter.items(): logger.info(f\"Counters {feat_name}:{len(feat_counter)}\") def save_counter(counter, save_path): check_path(save_path, exist_ok=opts.overwrite, log=logger.warning) with", "\"\"\"Get vocabulary coutings from transformed corpora samples.\"\"\" from onmt.utils.logging import", "save_counter(counter, save_path): check_path(save_path, exist_ok=opts.overwrite, log=logger.warning) with open(save_path, \"w\", encoding=\"utf8\") as", "build_vocab from onmt.transforms import make_transforms, get_transforms_cls def build_vocab_main(opts): \"\"\"Apply transforms", "opts.n_sample > 1, \\ f\"Illegal argument n_sample={opts.n_sample}.\" logger = init_logger()", "save_counter(src_counter, opts.src_vocab) else: save_counter(src_counter, opts.src_vocab) save_counter(tgt_counter, opts.tgt_vocab) for k, v", "pass as `-src_vocab` (and `-tgt_vocab`) when training: ``` <tok_0>\\t<count_0> <tok_1>\\t<count_1>", "src_counter += tgt_counter tgt_counter = src_counter logger.info(f\"Counters after share:{len(src_counter)}\") save_counter(src_counter,", "logger = init_logger() set_random_seed(opts.seed, False) transforms_cls = get_transforms_cls(opts._all_transform) fields =", "parser = _get_parser() opts, unknown = parser.parse_known_args() build_vocab_main(opts) if __name__", "src_feats_counter.items(): save_counter(v, opts.src_feats_vocab[k]) def _get_parser(): parser = ArgumentParser(description='build_vocab.py') dynamic_prepare_opts(parser, build_vocab_only=True)", "+ str(count) + \"\\n\") if opts.share_vocab: src_counter += tgt_counter tgt_counter", "onmt.utils.misc import set_random_seed, check_path from onmt.utils.parse import ArgumentParser from onmt.opts", "\"w\", encoding=\"utf8\") as fo: for tok, count in counter.most_common(): fo.write(tok", "be pass as `-src_vocab` (and `-tgt_vocab`) when training: ``` <tok_0>\\t<count_0>", "feat_name, feat_counter in src_feats_counter.items(): logger.info(f\"Counters {feat_name}:{len(feat_counter)}\") def 
save_counter(counter, save_path): check_path(save_path,", "transforms, n_sample=opts.n_sample) logger.info(f\"Counters src:{len(src_counter)}\") logger.info(f\"Counters tgt:{len(tgt_counter)}\") for feat_name, feat_counter in", "> 1, \\ f\"Illegal argument n_sample={opts.n_sample}.\" logger = init_logger() set_random_seed(opts.seed,", "counter.most_common(): fo.write(tok + \"\\t\" + str(count) + \"\\n\") if opts.share_vocab:", "= src_counter logger.info(f\"Counters after share:{len(src_counter)}\") save_counter(src_counter, opts.src_vocab) else: save_counter(src_counter, opts.src_vocab)", "= get_transforms_cls(opts._all_transform) fields = None transforms = make_transforms(opts, transforms_cls, fields)", "_get_parser(): parser = ArgumentParser(description='build_vocab.py') dynamic_prepare_opts(parser, build_vocab_only=True) return parser def main():", "onmt.inputters.corpus import build_vocab from onmt.transforms import make_transforms, get_transforms_cls def build_vocab_main(opts):", "str(count) + \"\\n\") if opts.share_vocab: src_counter += tgt_counter tgt_counter =", "transformed corpora samples.\"\"\" from onmt.utils.logging import init_logger from onmt.utils.misc import", "get_transforms_cls def build_vocab_main(opts): \"\"\"Apply transforms to samples of specified data", "import build_vocab from onmt.transforms import make_transforms, get_transforms_cls def build_vocab_main(opts): \"\"\"Apply", "build_vocab_main(opts): \"\"\"Apply transforms to samples of specified data and build", "opts, transforms, n_sample=opts.n_sample) logger.info(f\"Counters src:{len(src_counter)}\") logger.info(f\"Counters tgt:{len(tgt_counter)}\") for feat_name, feat_counter", "`-src_vocab` (and `-tgt_vocab`) when training: ``` <tok_0>\\t<count_0> <tok_1>\\t<count_1> ``` \"\"\"", "logger.info(f\"Counters tgt:{len(tgt_counter)}\") for feat_name, feat_counter in src_feats_counter.items(): logger.info(f\"Counters {feat_name}:{len(feat_counter)}\") def", "as following and can be pass as 
`-src_vocab` (and `-tgt_vocab`)", "from onmt.utils.logging import init_logger from onmt.utils.misc import set_random_seed, check_path from", "fields) logger.info(f\"Counter vocab from {opts.n_sample} samples.\") src_counter, tgt_counter, src_feats_counter =", "import set_random_seed, check_path from onmt.utils.parse import ArgumentParser from onmt.opts import", "import make_transforms, get_transforms_cls def build_vocab_main(opts): \"\"\"Apply transforms to samples of", "= build_vocab( opts, transforms, n_sample=opts.n_sample) logger.info(f\"Counters src:{len(src_counter)}\") logger.info(f\"Counters tgt:{len(tgt_counter)}\") for", "opts.share_vocab: src_counter += tgt_counter tgt_counter = src_counter logger.info(f\"Counters after share:{len(src_counter)}\")", "transforms_cls, fields) logger.info(f\"Counter vocab from {opts.n_sample} samples.\") src_counter, tgt_counter, src_feats_counter", "`-tgt_vocab`) when training: ``` <tok_0>\\t<count_0> <tok_1>\\t<count_1> ``` \"\"\" ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True)", "import init_logger from onmt.utils.misc import set_random_seed, check_path from onmt.utils.parse import", "after share:{len(src_counter)}\") save_counter(src_counter, opts.src_vocab) else: save_counter(src_counter, opts.src_vocab) save_counter(tgt_counter, opts.tgt_vocab) for", "opts.src_vocab) else: save_counter(src_counter, opts.src_vocab) save_counter(tgt_counter, opts.tgt_vocab) for k, v in", "fo: for tok, count in counter.most_common(): fo.write(tok + \"\\t\" +", "opts, unknown = parser.parse_known_args() build_vocab_main(opts) if __name__ == '__main__': main()", "logger.info(f\"Counters {feat_name}:{len(feat_counter)}\") def save_counter(counter, save_path): check_path(save_path, exist_ok=opts.overwrite, log=logger.warning) with open(save_path,", "set_random_seed(opts.seed, False) transforms_cls = get_transforms_cls(opts._all_transform) fields = None transforms =", "+ \"\\n\") if opts.share_vocab: src_counter += 
tgt_counter tgt_counter = src_counter", "in src_feats_counter.items(): save_counter(v, opts.src_feats_vocab[k]) def _get_parser(): parser = ArgumentParser(description='build_vocab.py') dynamic_prepare_opts(parser,", "that need vocab will be disabled in this. Built vocab", "fields = None transforms = make_transforms(opts, transforms_cls, fields) logger.info(f\"Counter vocab", "def main(): parser = _get_parser() opts, unknown = parser.parse_known_args() build_vocab_main(opts)", "{opts.n_sample} samples.\") src_counter, tgt_counter, src_feats_counter = build_vocab( opts, transforms, n_sample=opts.n_sample)", "opts.tgt_vocab) for k, v in src_feats_counter.items(): save_counter(v, opts.src_feats_vocab[k]) def _get_parser():", "tgt_counter, src_feats_counter = build_vocab( opts, transforms, n_sample=opts.n_sample) logger.info(f\"Counters src:{len(src_counter)}\") logger.info(f\"Counters", "def save_counter(counter, save_path): check_path(save_path, exist_ok=opts.overwrite, log=logger.warning) with open(save_path, \"w\", encoding=\"utf8\")", "be disabled in this. 
Built vocab is saved in plain", "1, \\ f\"Illegal argument n_sample={opts.n_sample}.\" logger = init_logger() set_random_seed(opts.seed, False)", "<tok_1>\\t<count_1> ``` \"\"\" ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True) assert opts.n_sample == -1 or", "parser = ArgumentParser(description='build_vocab.py') dynamic_prepare_opts(parser, build_vocab_only=True) return parser def main(): parser", "from onmt.inputters.corpus import build_vocab from onmt.transforms import make_transforms, get_transforms_cls def", "== -1 or opts.n_sample > 1, \\ f\"Illegal argument n_sample={opts.n_sample}.\"", "False) transforms_cls = get_transforms_cls(opts._all_transform) fields = None transforms = make_transforms(opts,", "= make_transforms(opts, transforms_cls, fields) logger.info(f\"Counter vocab from {opts.n_sample} samples.\") src_counter,", "main(): parser = _get_parser() opts, unknown = parser.parse_known_args() build_vocab_main(opts) if", "save_counter(tgt_counter, opts.tgt_vocab) for k, v in src_feats_counter.items(): save_counter(v, opts.src_feats_vocab[k]) def", "python \"\"\"Get vocabulary coutings from transformed corpora samples.\"\"\" from onmt.utils.logging", "= _get_parser() opts, unknown = parser.parse_known_args() build_vocab_main(opts) if __name__ ==", "saved in plain text format as following and can be", "init_logger from onmt.utils.misc import set_random_seed, check_path from onmt.utils.parse import ArgumentParser", "#!/usr/bin/env python \"\"\"Get vocabulary coutings from transformed corpora samples.\"\"\" from", "format as following and can be pass as `-src_vocab` (and", "get_transforms_cls(opts._all_transform) fields = None transforms = make_transforms(opts, transforms_cls, fields) logger.info(f\"Counter", "ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True) assert opts.n_sample == -1 or opts.n_sample > 1,", "-1 or opts.n_sample > 1, \\ f\"Illegal argument n_sample={opts.n_sample}.\" logger", "src_counter 
logger.info(f\"Counters after share:{len(src_counter)}\") save_counter(src_counter, opts.src_vocab) else: save_counter(src_counter, opts.src_vocab) save_counter(tgt_counter,", "``` \"\"\" ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True) assert opts.n_sample == -1 or opts.n_sample", "argument n_sample={opts.n_sample}.\" logger = init_logger() set_random_seed(opts.seed, False) transforms_cls = get_transforms_cls(opts._all_transform)", "in this. Built vocab is saved in plain text format", "opts.n_sample == -1 or opts.n_sample > 1, \\ f\"Illegal argument", "exist_ok=opts.overwrite, log=logger.warning) with open(save_path, \"w\", encoding=\"utf8\") as fo: for tok,", "log=logger.warning) with open(save_path, \"w\", encoding=\"utf8\") as fo: for tok, count", "parser def main(): parser = _get_parser() opts, unknown = parser.parse_known_args()", "def build_vocab_main(opts): \"\"\"Apply transforms to samples of specified data and", "plain text format as following and can be pass as", "onmt.utils.parse import ArgumentParser from onmt.opts import dynamic_prepare_opts from onmt.inputters.corpus import", "\\ f\"Illegal argument n_sample={opts.n_sample}.\" logger = init_logger() set_random_seed(opts.seed, False) transforms_cls", "\"\\n\") if opts.share_vocab: src_counter += tgt_counter tgt_counter = src_counter logger.info(f\"Counters", "build_vocab_only=True) return parser def main(): parser = _get_parser() opts, unknown", "for feat_name, feat_counter in src_feats_counter.items(): logger.info(f\"Counters {feat_name}:{len(feat_counter)}\") def save_counter(counter, save_path):", "= None transforms = make_transforms(opts, transforms_cls, fields) logger.info(f\"Counter vocab from", "tgt_counter = src_counter logger.info(f\"Counters after share:{len(src_counter)}\") save_counter(src_counter, opts.src_vocab) else: save_counter(src_counter,", "vocabulary coutings from transformed corpora samples.\"\"\" from onmt.utils.logging import init_logger", "it. 
Transforms that need vocab will be disabled in this.", "Transforms that need vocab will be disabled in this. Built", "build_vocab_only=True) assert opts.n_sample == -1 or opts.n_sample > 1, \\", "logger.info(f\"Counters after share:{len(src_counter)}\") save_counter(src_counter, opts.src_vocab) else: save_counter(src_counter, opts.src_vocab) save_counter(tgt_counter, opts.tgt_vocab)", "disabled in this. Built vocab is saved in plain text" ]
[ "= Cohort(grade, train_years) df = pd.DataFrame() for model in self.models:", "# an experiment that trains models with subsets of the", "features according to their permutation importance rank # like SingleDatasetExperiment,", "grade=main_config.single_grade, train_years=main_config.train_years, test_years=main_config.test_years, compute_train_metrics=False, **kwargs ): train_cohort = Cohort(grade, train_years)", "cur_df['num_feats'] = i df = pd.concat([df, cur_df], ignore_index=True) return df", "use_cache=use_cache ) def perform( self, grade=main_config.single_grade, train_years=main_config.train_years, test_years=main_config.test_years, compute_train_metrics=False, **kwargs", "config.num_feats: dataset_config.feat_whitelist.clear() for feat in feats[:i]: dataset_config.feat_whitelist.append(feat) exp = SingleDatasetExperiment('ignore',", "from schools3.config.data.datasets import dataset_config # an experiment that trains models", "import Cohort from schools3.config import main_config from schools3.config import global_config", "continue train_data = Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels) model.train(train_data) feats_exp =", "import SingleDatasetExperiment from schools3.ml.models.tfkeras_model import TFKerasModel from schools3.ml.models.sklearn_model import SklearnModel", "their permutation importance rank # like SingleDatasetExperiment, this works on", "self.features_list, self.labels, [model], self.metrics) feature_names, _, sorted_idxs = feats_exp.get_feature_importances(model, train_data)", "schools3.config.ml.experiments.feat_pruning_experiment_config as config from schools3.config.data.datasets import dataset_config # an experiment", "features_list, labels, models, metrics, use_cache=use_cache ) def perform( self, grade=main_config.single_grade,", "global_config from schools3.data.datasets.dataset import Dataset from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment 
from", "from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment from schools3.ml.models.tfkeras_model import TFKerasModel from schools3.ml.models.sklearn_model", "[model], self.metrics) cur_df = exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs) cur_df['num_feats']", "self.models: if not (isinstance(model, SklearnModel) or isinstance(model, TFKerasModel)): continue train_data", "main_config from schools3.config import global_config from schools3.data.datasets.dataset import Dataset from", "from schools3.data.datasets.dataset import Dataset from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment from schools3.ml.experiments.single_dataset_experiment", "[model], self.metrics) feature_names, _, sorted_idxs = feats_exp.get_feature_importances(model, train_data) feats =", "TFKerasModel)): continue train_data = Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels) model.train(train_data) feats_exp", "cur_df = exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs) cur_df['num_feats'] = i", "exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs) cur_df['num_feats'] = i df =", "feats[:i]: dataset_config.feat_whitelist.append(feat) exp = SingleDatasetExperiment('ignore', self.features_list, self.labels, [model], self.metrics) cur_df", "Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels) model.train(train_data) feats_exp = FeatureImportancesExperiment('ignore', self.features_list, self.labels,", "self.features_list, self.labels, [model], self.metrics) cur_df = exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics,", "import ModelsExperiment from schools3.data.base.cohort import Cohort from schools3.config import main_config", "schools3.data.base.cohort import Cohort from 
schools3.config import main_config from schools3.config import", "from schools3.data.base.cohort import Cohort from schools3.config import main_config from schools3.config", "= exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs) cur_df['num_feats'] = i df", "**kwargs) cur_df['num_feats'] = i df = pd.concat([df, cur_df], ignore_index=True) return", "on a specific grade class FeaturePruningExperiment(ModelsExperiment): def __init__( self, name='ignore',", "def perform( self, grade=main_config.single_grade, train_years=main_config.train_years, test_years=main_config.test_years, compute_train_metrics=False, **kwargs ): train_cohort", "Dataset from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment from", "Cohort from schools3.config import main_config from schools3.config import global_config from", "model.get_feature_processor(), self.labels) model.train(train_data) feats_exp = FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model], self.metrics)", "FeaturePruningExperiment(ModelsExperiment): def __init__( self, name='ignore', features_list=main_config.features, labels=main_config.labels, models=main_config.models, metrics=main_config.metrics, use_cache=main_config.use_cache", "schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment from schools3.ml.models.tfkeras_model import", "from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment from schools3.ml.models.tfkeras_model", "train_data) feats = np.flip(feature_names[sorted_idxs]) for i in config.num_feats: dataset_config.feat_whitelist.clear() for", "i in config.num_feats: 
dataset_config.feat_whitelist.clear() for feat in feats[:i]: dataset_config.feat_whitelist.append(feat) exp", "def __init__( self, name='ignore', features_list=main_config.features, labels=main_config.labels, models=main_config.models, metrics=main_config.metrics, use_cache=main_config.use_cache ):", "schools3.ml.models.tfkeras_model import TFKerasModel from schools3.ml.models.sklearn_model import SklearnModel import schools3.config.ml.experiments.feat_pruning_experiment_config as", "name, features_list, labels, models, metrics, use_cache=use_cache ) def perform( self,", "subsets of the features according to their permutation importance rank", "schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment from schools3.ml.models.tfkeras_model import TFKerasModel from schools3.ml.models.sklearn_model import", "with subsets of the features according to their permutation importance", ") def perform( self, grade=main_config.single_grade, train_years=main_config.train_years, test_years=main_config.test_years, compute_train_metrics=False, **kwargs ):", "= feats_exp.get_feature_importances(model, train_data) feats = np.flip(feature_names[sorted_idxs]) for i in config.num_feats:", "<filename>schools3/ml/experiments/feat_pruning_experiment.py import numpy as np import pandas as pd from", "name='ignore', features_list=main_config.features, labels=main_config.labels, models=main_config.models, metrics=main_config.metrics, use_cache=main_config.use_cache ): super(FeaturePruningExperiment, self).__init__( name,", "a specific grade class FeaturePruningExperiment(ModelsExperiment): def __init__( self, name='ignore', features_list=main_config.features,", "importance rank # like SingleDatasetExperiment, this works on a specific", "of the features according to their permutation importance rank #", "as np import pandas as pd from schools3.ml.experiments.models_experiment import ModelsExperiment", "or isinstance(model, TFKerasModel)): continue train_data = 
Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels)", "sorted_idxs = feats_exp.get_feature_importances(model, train_data) feats = np.flip(feature_names[sorted_idxs]) for i in", "the features according to their permutation importance rank # like", "model.train(train_data) feats_exp = FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model], self.metrics) feature_names, _,", "pd from schools3.ml.experiments.models_experiment import ModelsExperiment from schools3.data.base.cohort import Cohort from", "self.metrics) feature_names, _, sorted_idxs = feats_exp.get_feature_importances(model, train_data) feats = np.flip(feature_names[sorted_idxs])", "schools3.config import main_config from schools3.config import global_config from schools3.data.datasets.dataset import", "import schools3.config.ml.experiments.feat_pruning_experiment_config as config from schools3.config.data.datasets import dataset_config # an", "test_years, compute_train_metrics=compute_train_metrics, **kwargs) cur_df['num_feats'] = i df = pd.concat([df, cur_df],", "as pd from schools3.ml.experiments.models_experiment import ModelsExperiment from schools3.data.base.cohort import Cohort", "feature_names, _, sorted_idxs = feats_exp.get_feature_importances(model, train_data) feats = np.flip(feature_names[sorted_idxs]) for", "not (isinstance(model, SklearnModel) or isinstance(model, TFKerasModel)): continue train_data = Dataset(train_cohort,", "FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model], self.metrics) feature_names, _, sorted_idxs = feats_exp.get_feature_importances(model,", "): train_cohort = Cohort(grade, train_years) df = pd.DataFrame() for model", "experiment that trains models with subsets of the features according", "SingleDatasetExperiment, this works on a specific grade class FeaturePruningExperiment(ModelsExperiment): def", "self, name='ignore', features_list=main_config.features, labels=main_config.labels, 
models=main_config.models, metrics=main_config.metrics, use_cache=main_config.use_cache ): super(FeaturePruningExperiment, self).__init__(", "import dataset_config # an experiment that trains models with subsets", "metrics=main_config.metrics, use_cache=main_config.use_cache ): super(FeaturePruningExperiment, self).__init__( name, features_list, labels, models, metrics,", "labels, models, metrics, use_cache=use_cache ) def perform( self, grade=main_config.single_grade, train_years=main_config.train_years,", "ModelsExperiment from schools3.data.base.cohort import Cohort from schools3.config import main_config from", "import global_config from schools3.data.datasets.dataset import Dataset from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment", "import Dataset from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment", "_, sorted_idxs = feats_exp.get_feature_importances(model, train_data) feats = np.flip(feature_names[sorted_idxs]) for i", "train_years) df = pd.DataFrame() for model in self.models: if not", "numpy as np import pandas as pd from schools3.ml.experiments.models_experiment import", "SklearnModel import schools3.config.ml.experiments.feat_pruning_experiment_config as config from schools3.config.data.datasets import dataset_config #", "as config from schools3.config.data.datasets import dataset_config # an experiment that", "df = pd.DataFrame() for model in self.models: if not (isinstance(model,", "rank # like SingleDatasetExperiment, this works on a specific grade", "feats = np.flip(feature_names[sorted_idxs]) for i in config.num_feats: dataset_config.feat_whitelist.clear() for feat", "feat in feats[:i]: dataset_config.feat_whitelist.append(feat) exp = SingleDatasetExperiment('ignore', self.features_list, self.labels, [model],", "train_years=main_config.train_years, 
test_years=main_config.test_years, compute_train_metrics=False, **kwargs ): train_cohort = Cohort(grade, train_years) df", "super(FeaturePruningExperiment, self).__init__( name, features_list, labels, models, metrics, use_cache=use_cache ) def", "np.flip(feature_names[sorted_idxs]) for i in config.num_feats: dataset_config.feat_whitelist.clear() for feat in feats[:i]:", "from schools3.config import main_config from schools3.config import global_config from schools3.data.datasets.dataset", "for i in config.num_feats: dataset_config.feat_whitelist.clear() for feat in feats[:i]: dataset_config.feat_whitelist.append(feat)", "): super(FeaturePruningExperiment, self).__init__( name, features_list, labels, models, metrics, use_cache=use_cache )", "np import pandas as pd from schools3.ml.experiments.models_experiment import ModelsExperiment from", "isinstance(model, TFKerasModel)): continue train_data = Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels) model.train(train_data)", "from schools3.ml.experiments.models_experiment import ModelsExperiment from schools3.data.base.cohort import Cohort from schools3.config", "like SingleDatasetExperiment, this works on a specific grade class FeaturePruningExperiment(ModelsExperiment):", "SingleDatasetExperiment from schools3.ml.models.tfkeras_model import TFKerasModel from schools3.ml.models.sklearn_model import SklearnModel import", "labels=main_config.labels, models=main_config.models, metrics=main_config.metrics, use_cache=main_config.use_cache ): super(FeaturePruningExperiment, self).__init__( name, features_list, labels,", "models, metrics, use_cache=use_cache ) def perform( self, grade=main_config.single_grade, train_years=main_config.train_years, test_years=main_config.test_years,", "for feat in feats[:i]: dataset_config.feat_whitelist.append(feat) exp = SingleDatasetExperiment('ignore', self.features_list, self.labels,", "schools3.ml.experiments.models_experiment import ModelsExperiment 
from schools3.data.base.cohort import Cohort from schools3.config import", "(isinstance(model, SklearnModel) or isinstance(model, TFKerasModel)): continue train_data = Dataset(train_cohort, self.features_list,", "feats_exp.get_feature_importances(model, train_data) feats = np.flip(feature_names[sorted_idxs]) for i in config.num_feats: dataset_config.feat_whitelist.clear()", "TFKerasModel from schools3.ml.models.sklearn_model import SklearnModel import schools3.config.ml.experiments.feat_pruning_experiment_config as config from", "train_data = Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels) model.train(train_data) feats_exp = FeatureImportancesExperiment('ignore',", "test_years=main_config.test_years, compute_train_metrics=False, **kwargs ): train_cohort = Cohort(grade, train_years) df =", "to their permutation importance rank # like SingleDatasetExperiment, this works", "for model in self.models: if not (isinstance(model, SklearnModel) or isinstance(model,", "this works on a specific grade class FeaturePruningExperiment(ModelsExperiment): def __init__(", "trains models with subsets of the features according to their", "dataset_config.feat_whitelist.clear() for feat in feats[:i]: dataset_config.feat_whitelist.append(feat) exp = SingleDatasetExperiment('ignore', self.features_list,", "self, grade=main_config.single_grade, train_years=main_config.train_years, test_years=main_config.test_years, compute_train_metrics=False, **kwargs ): train_cohort = Cohort(grade,", "schools3.data.datasets.dataset import Dataset from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment from schools3.ml.experiments.single_dataset_experiment import", "**kwargs ): train_cohort = Cohort(grade, train_years) df = pd.DataFrame() for", "dataset_config.feat_whitelist.append(feat) exp = SingleDatasetExperiment('ignore', self.features_list, self.labels, [model], self.metrics) cur_df =", "schools3.ml.models.sklearn_model 
import SklearnModel import schools3.config.ml.experiments.feat_pruning_experiment_config as config from schools3.config.data.datasets import", "SklearnModel) or isinstance(model, TFKerasModel)): continue train_data = Dataset(train_cohort, self.features_list, model.get_feature_processor(),", "= pd.DataFrame() for model in self.models: if not (isinstance(model, SklearnModel)", "compute_train_metrics=False, **kwargs ): train_cohort = Cohort(grade, train_years) df = pd.DataFrame()", "from schools3.config import global_config from schools3.data.datasets.dataset import Dataset from schools3.ml.experiments.feat_importances_experiment", "an experiment that trains models with subsets of the features", "if not (isinstance(model, SklearnModel) or isinstance(model, TFKerasModel)): continue train_data =", "feats_exp = FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model], self.metrics) feature_names, _, sorted_idxs", "import main_config from schools3.config import global_config from schools3.data.datasets.dataset import Dataset", "exp = SingleDatasetExperiment('ignore', self.features_list, self.labels, [model], self.metrics) cur_df = exp.perform(grade,", "pandas as pd from schools3.ml.experiments.models_experiment import ModelsExperiment from schools3.data.base.cohort import", "compute_train_metrics=compute_train_metrics, **kwargs) cur_df['num_feats'] = i df = pd.concat([df, cur_df], ignore_index=True)", "schools3.config import global_config from schools3.data.datasets.dataset import Dataset from schools3.ml.experiments.feat_importances_experiment import", "models=main_config.models, metrics=main_config.metrics, use_cache=main_config.use_cache ): super(FeaturePruningExperiment, self).__init__( name, features_list, labels, models,", "according to their permutation importance rank # like SingleDatasetExperiment, this", "works on a specific grade class FeaturePruningExperiment(ModelsExperiment): def __init__( self,", "specific grade class 
FeaturePruningExperiment(ModelsExperiment): def __init__( self, name='ignore', features_list=main_config.features, labels=main_config.labels,", "Cohort(grade, train_years) df = pd.DataFrame() for model in self.models: if", "in self.models: if not (isinstance(model, SklearnModel) or isinstance(model, TFKerasModel)): continue", "import FeatureImportancesExperiment from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment from schools3.ml.models.tfkeras_model import TFKerasModel", "self).__init__( name, features_list, labels, models, metrics, use_cache=use_cache ) def perform(", "model in self.models: if not (isinstance(model, SklearnModel) or isinstance(model, TFKerasModel)):", "self.labels, [model], self.metrics) cur_df = exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs)", "models with subsets of the features according to their permutation", "import SklearnModel import schools3.config.ml.experiments.feat_pruning_experiment_config as config from schools3.config.data.datasets import dataset_config", "train_cohort = Cohort(grade, train_years) df = pd.DataFrame() for model in", "perform( self, grade=main_config.single_grade, train_years=main_config.train_years, test_years=main_config.test_years, compute_train_metrics=False, **kwargs ): train_cohort =", "= FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model], self.metrics) feature_names, _, sorted_idxs =", "import numpy as np import pandas as pd from schools3.ml.experiments.models_experiment", "= SingleDatasetExperiment('ignore', self.features_list, self.labels, [model], self.metrics) cur_df = exp.perform(grade, train_years,", "schools3.config.data.datasets import dataset_config # an experiment that trains models with", "self.metrics) cur_df = exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs) cur_df['num_feats'] =", "from schools3.ml.models.tfkeras_model import 
TFKerasModel from schools3.ml.models.sklearn_model import SklearnModel import schools3.config.ml.experiments.feat_pruning_experiment_config", "from schools3.ml.models.sklearn_model import SklearnModel import schools3.config.ml.experiments.feat_pruning_experiment_config as config from schools3.config.data.datasets", "train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs) cur_df['num_feats'] = i df = pd.concat([df,", "import pandas as pd from schools3.ml.experiments.models_experiment import ModelsExperiment from schools3.data.base.cohort", "= np.flip(feature_names[sorted_idxs]) for i in config.num_feats: dataset_config.feat_whitelist.clear() for feat in", "permutation importance rank # like SingleDatasetExperiment, this works on a", "metrics, use_cache=use_cache ) def perform( self, grade=main_config.single_grade, train_years=main_config.train_years, test_years=main_config.test_years, compute_train_metrics=False,", "self.labels, [model], self.metrics) feature_names, _, sorted_idxs = feats_exp.get_feature_importances(model, train_data) feats", "SingleDatasetExperiment('ignore', self.features_list, self.labels, [model], self.metrics) cur_df = exp.perform(grade, train_years, test_years,", "config from schools3.config.data.datasets import dataset_config # an experiment that trains", "__init__( self, name='ignore', features_list=main_config.features, labels=main_config.labels, models=main_config.models, metrics=main_config.metrics, use_cache=main_config.use_cache ): super(FeaturePruningExperiment,", "that trains models with subsets of the features according to", "pd.DataFrame() for model in self.models: if not (isinstance(model, SklearnModel) or", "features_list=main_config.features, labels=main_config.labels, models=main_config.models, metrics=main_config.metrics, use_cache=main_config.use_cache ): super(FeaturePruningExperiment, self).__init__( name, features_list,", "self.labels) model.train(train_data) feats_exp = 
FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model], self.metrics) feature_names,", "self.features_list, model.get_feature_processor(), self.labels) model.train(train_data) feats_exp = FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model],", "grade class FeaturePruningExperiment(ModelsExperiment): def __init__( self, name='ignore', features_list=main_config.features, labels=main_config.labels, models=main_config.models,", "class FeaturePruningExperiment(ModelsExperiment): def __init__( self, name='ignore', features_list=main_config.features, labels=main_config.labels, models=main_config.models, metrics=main_config.metrics,", "dataset_config # an experiment that trains models with subsets of", "import TFKerasModel from schools3.ml.models.sklearn_model import SklearnModel import schools3.config.ml.experiments.feat_pruning_experiment_config as config", "use_cache=main_config.use_cache ): super(FeaturePruningExperiment, self).__init__( name, features_list, labels, models, metrics, use_cache=use_cache", "= Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels) model.train(train_data) feats_exp = FeatureImportancesExperiment('ignore', self.features_list,", "FeatureImportancesExperiment from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment from schools3.ml.models.tfkeras_model import TFKerasModel from", "in config.num_feats: dataset_config.feat_whitelist.clear() for feat in feats[:i]: dataset_config.feat_whitelist.append(feat) exp =", "in feats[:i]: dataset_config.feat_whitelist.append(feat) exp = SingleDatasetExperiment('ignore', self.features_list, self.labels, [model], self.metrics)", "# like SingleDatasetExperiment, this works on a specific grade class" ]
[ "suffix in suffixes: msk_channel = imread(os.path.join(img_path, img_name + suffix)) if", "img_name + suffix)) if len(msk_channel.shape) == 2: msk_channel = np.expand_dims(msk_channel,", "\" \"Check the masks.\") msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel i", "def load_weights(cfg, img_path, img_name, weight_suffixes): a_weights = np.load(os.path.join(img_path, img_name +", "if len(weights_channel.shape) != 3: raise ValueError(\"Weights must be 3-dim here.", "if _.endswith(image_format)] total_count += len(path_list) except OSError: print(\"Directory does not", "= imread(os.path.join(img_path, img_name + suffix)) if len(msk_channel.shape) == 2: msk_channel", "suffix)) if len(weights_channel.shape) == 2: weights_channel = np.expand_dims(weights_channel, axis=-1) if", "_.endswith(image_format)] total_count += len(path_list) except OSError: print(\"Directory does not exist.", ":, :-1] # img = np.roll(img, shift=1, axis=2) # CHECK", "from skimage.io import imread def get_file_count(paths, image_format='.tif'): total_count = 0", "load_weights(cfg, img_path, img_name, weight_suffixes): a_weights = np.load(os.path.join(img_path, img_name + weight_suffixes[0]))", "your weights 1 or more than 3 dimensions? Check the", "= 0 for suffix in suffixes: msk_channel = imread(os.path.join(img_path, img_name", "suffixes: msk_channel = imread(os.path.join(img_path, img_name + suffix)) if len(msk_channel.shape) ==", "msk_channel = imread(os.path.join(img_path, img_name + suffix)) if len(msk_channel.shape) == 2:", "in weight_suffixes: weights_channel = np.load(os.path.join(img_path, img_name + suffix)) if len(weights_channel.shape)", "0 for path in paths: try: path_list = [_ for", "load mask def load_mask(mask_path): mask = imread(mask_path) return mask def", "raise ValueError(\"Mask must be 3-dim here. 
Does your mask have", "paths: try: path_list = [_ for _ in os.listdir(path) if", "path_list = [_ for _ in os.listdir(path) if _.endswith(image_format)] total_count", "for suffix in weight_suffixes: weights_channel = np.load(os.path.join(img_path, img_name + suffix))", "np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix", "suffix)) if len(msk_channel.shape) == 2: msk_channel = np.expand_dims(msk_channel, axis=-1) if", "= imread(mask_path) return mask def load_mask_from_img(cfg, img_path, img_name, suffixes): a_mask", "img = np.roll(img, shift=1, axis=2) # CHECK IMAGE FORMAT return", "img_path, img_name, weight_suffixes): a_weights = np.load(os.path.join(img_path, img_name + weight_suffixes[0])) weights", "total_count += len(path_list) except OSError: print(\"Directory does not exist. Returned", "imread(img_path) if img.shape[2] == 4: img = img[:, :, :-1]", "0 for suffix in suffixes: msk_channel = imread(os.path.join(img_path, img_name +", "shift=1, axis=2) # CHECK IMAGE FORMAT return img # Function", "msk_channel = np.expand_dims(msk_channel, axis=-1) if len(msk_channel.shape) != 3: raise ValueError(\"Mask", "if len(weights_channel.shape) == 2: weights_channel = np.expand_dims(weights_channel, axis=-1) if len(weights_channel.shape)", "or more than 3 dimensions? Check the weights.\") weights[:, :,", "this path will be 0\") return total_count # Function to", "ValueError(\"Weights must be 3-dim here. Has your weights 1 or", "np.roll(img, shift=1, axis=2) # CHECK IMAGE FORMAT return img #", "be 3-dim here. 
Has your weights 1 or more than", "import imread def get_file_count(paths, image_format='.tif'): total_count = 0 for path", "= img[:, :, :-1] # img = np.roll(img, shift=1, axis=2)", "suffixes[0])) msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS)) i =", "= 0 for path in paths: try: path_list = [_", "try: path_list = [_ for _ in os.listdir(path) if _.endswith(image_format)]", "get_file_count(paths, image_format='.tif'): total_count = 0 for path in paths: try:", "image_format='.tif'): total_count = 0 for path in paths: try: path_list", "CHECK IMAGE FORMAT return img # Function to load mask", "dimensions? Check the weights.\") weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = weights_channel i", "2: weights_channel = np.expand_dims(weights_channel, axis=-1) if len(weights_channel.shape) != 3: raise", "more than 3 dimensions? \" \"Check the masks.\") msk[:, :,", "np.expand_dims(weights_channel, axis=-1) if len(weights_channel.shape) != 3: raise ValueError(\"Weights must be", "os import numpy as np from skimage.io import imread def", "img = imread(img_path) if img.shape[2] == 4: img = img[:,", "Function to load mask def load_mask(mask_path): mask = imread(mask_path) return", "return msk def load_weights(cfg, img_path, img_name, weight_suffixes): a_weights = np.load(os.path.join(img_path,", "= np.load(os.path.join(img_path, img_name + weight_suffixes[0])) weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes)", "= [_ for _ in os.listdir(path) if _.endswith(image_format)] total_count +=", "len(msk_channel.shape) != 3: raise ValueError(\"Mask must be 3-dim here. Does", "len(path_list) except OSError: print(\"Directory does not exist. 
Returned file count", "= np.expand_dims(msk_channel, axis=-1) if len(msk_channel.shape) != 3: raise ValueError(\"Mask must", "cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix in suffixes: msk_channel =", "return mask def load_mask_from_img(cfg, img_path, img_name, suffixes): a_mask = imread(os.path.join(img_path,", "a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix in", "= np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0 for", "imread(os.path.join(img_path, img_name + suffixes[0])) msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) *", "the weights.\") weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = weights_channel i += cfg.NUMBER_MSK_CHANNELS", "4: img = img[:, :, :-1] # img = np.roll(img,", "a_mask = imread(os.path.join(img_path, img_name + suffixes[0])) msk = np.zeros((a_mask.shape[0], a_mask.shape[1],", "weights 1 or more than 3 dimensions? Check the weights.\")", "be 3-dim here. Does your mask have 1 or more", "len(weights_channel.shape) == 2: weights_channel = np.expand_dims(weights_channel, axis=-1) if len(weights_channel.shape) !=", "than 3 dimensions? \" \"Check the masks.\") msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS]", "i = 0 for suffix in suffixes: msk_channel = imread(os.path.join(img_path,", "len(msk_channel.shape) == 2: msk_channel = np.expand_dims(msk_channel, axis=-1) if len(msk_channel.shape) !=", "len(suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix in suffixes:", "mask def load_mask_from_img(cfg, img_path, img_name, suffixes): a_mask = imread(os.path.join(img_path, img_name", "img_name + suffixes[0])) msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS))", "cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix in weight_suffixes: weights_channel =", "here. 
Has your weights 1 or more than 3 dimensions?", "return img # Function to load mask def load_mask(mask_path): mask", "path will be 0\") return total_count # Function to load", "a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix in", "does not exist. Returned file count for this path will", "len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix in weight_suffixes:", "to load image def load_image(img_path): img = imread(img_path) if img.shape[2]", "* cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix in suffixes: msk_channel", "if len(msk_channel.shape) == 2: msk_channel = np.expand_dims(msk_channel, axis=-1) if len(msk_channel.shape)", "= imread(img_path) if img.shape[2] == 4: img = img[:, :,", "axis=-1) if len(msk_channel.shape) != 3: raise ValueError(\"Mask must be 3-dim", "masks.\") msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel i += cfg.NUMBER_MSK_CHANNELS #", "img # Function to load mask def load_mask(mask_path): mask =", "a_weights = np.load(os.path.join(img_path, img_name + weight_suffixes[0])) weights = np.zeros((a_weights.shape[0], a_weights.shape[1],", "\"Check the masks.\") msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel i +=", "load image def load_image(img_path): img = imread(img_path) if img.shape[2] ==", "0 for suffix in weight_suffixes: weights_channel = np.load(os.path.join(img_path, img_name +", "than 3 dimensions? 
Check the weights.\") weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] =", "for suffix in suffixes: msk_channel = imread(os.path.join(img_path, img_name + suffix))", "weights.\") weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = weights_channel i += cfg.NUMBER_MSK_CHANNELS return", "will be 0\") return total_count # Function to load image", "def get_file_count(paths, image_format='.tif'): total_count = 0 for path in paths:", "np.expand_dims(msk_channel, axis=-1) if len(msk_channel.shape) != 3: raise ValueError(\"Mask must be", "weights_channel = np.expand_dims(weights_channel, axis=-1) if len(weights_channel.shape) != 3: raise ValueError(\"Weights", "in suffixes: msk_channel = imread(os.path.join(img_path, img_name + suffix)) if len(msk_channel.shape)", "+ weight_suffixes[0])) weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS)) i", "load_image(img_path): img = imread(img_path) if img.shape[2] == 4: img =", "img_name, weight_suffixes): a_weights = np.load(os.path.join(img_path, img_name + weight_suffixes[0])) weights =", "0\") return total_count # Function to load image def load_image(img_path):", "exist. Returned file count for this path will be 0\")", "== 4: img = img[:, :, :-1] # img =", "img = img[:, :, :-1] # img = np.roll(img, shift=1,", "img_name + weight_suffixes[0])) weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS))", "weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = weights_channel i += cfg.NUMBER_MSK_CHANNELS return weights", "suffixes): a_mask = imread(os.path.join(img_path, img_name + suffixes[0])) msk = np.zeros((a_mask.shape[0],", "+= len(path_list) except OSError: print(\"Directory does not exist. Returned file", "load_mask_from_img(cfg, img_path, img_name, suffixes): a_mask = imread(os.path.join(img_path, img_name + suffixes[0]))", "have 1 or more than 3 dimensions? \" \"Check the", "or more than 3 dimensions? 
\" \"Check the masks.\") msk[:,", "* cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix in weight_suffixes: weights_channel", "3 dimensions? Check the weights.\") weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = weights_channel", "+ suffixes[0])) msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS)) i", "must be 3-dim here. Does your mask have 1 or", "weights_channel = np.load(os.path.join(img_path, img_name + suffix)) if len(weights_channel.shape) == 2:", "= np.roll(img, shift=1, axis=2) # CHECK IMAGE FORMAT return img", "weight_suffixes): a_weights = np.load(os.path.join(img_path, img_name + weight_suffixes[0])) weights = np.zeros((a_weights.shape[0],", "# Function to load image def load_image(img_path): img = imread(img_path)", "i += cfg.NUMBER_MSK_CHANNELS # print(msk, msk.shape) return msk def load_weights(cfg,", "total_count = 0 for path in paths: try: path_list =", "img_path, img_name, suffixes): a_mask = imread(os.path.join(img_path, img_name + suffixes[0])) msk", "np.load(os.path.join(img_path, img_name + suffix)) if len(weights_channel.shape) == 2: weights_channel =", "Returned file count for this path will be 0\") return", "!= 3: raise ValueError(\"Weights must be 3-dim here. Has your", "import os import numpy as np from skimage.io import imread", "def load_mask(mask_path): mask = imread(mask_path) return mask def load_mask_from_img(cfg, img_path,", "+ suffix)) if len(msk_channel.shape) == 2: msk_channel = np.expand_dims(msk_channel, axis=-1)", "1 or more than 3 dimensions? \" \"Check the masks.\")", "Function to load image def load_image(img_path): img = imread(img_path) if", "if img.shape[2] == 4: img = img[:, :, :-1] #", "mask def load_mask(mask_path): mask = imread(mask_path) return mask def load_mask_from_img(cfg,", "mask have 1 or more than 3 dimensions? 
\" \"Check", "Does your mask have 1 or more than 3 dimensions?", "for path in paths: try: path_list = [_ for _", "as np from skimage.io import imread def get_file_count(paths, image_format='.tif'): total_count", "dimensions? \" \"Check the masks.\") msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel", "img.shape[2] == 4: img = img[:, :, :-1] # img", "ValueError(\"Mask must be 3-dim here. Does your mask have 1", "FORMAT return img # Function to load mask def load_mask(mask_path):", "raise ValueError(\"Weights must be 3-dim here. Has your weights 1", "msk_channel i += cfg.NUMBER_MSK_CHANNELS # print(msk, msk.shape) return msk def", "= 0 for suffix in weight_suffixes: weights_channel = np.load(os.path.join(img_path, img_name", "# print(msk, msk.shape) return msk def load_weights(cfg, img_path, img_name, weight_suffixes):", "i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel i += cfg.NUMBER_MSK_CHANNELS # print(msk, msk.shape) return", "image def load_image(img_path): img = imread(img_path) if img.shape[2] == 4:", "== 2: msk_channel = np.expand_dims(msk_channel, axis=-1) if len(msk_channel.shape) != 3:", "not exist. Returned file count for this path will be", "# CHECK IMAGE FORMAT return img # Function to load", "for _ in os.listdir(path) if _.endswith(image_format)] total_count += len(path_list) except", "3: raise ValueError(\"Weights must be 3-dim here. 
Has your weights", "# Function to load mask def load_mask(mask_path): mask = imread(mask_path)", "for this path will be 0\") return total_count # Function", "[_ for _ in os.listdir(path) if _.endswith(image_format)] total_count += len(path_list)", "if len(msk_channel.shape) != 3: raise ValueError(\"Mask must be 3-dim here.", "axis=-1) if len(weights_channel.shape) != 3: raise ValueError(\"Weights must be 3-dim", "IMAGE FORMAT return img # Function to load mask def", "img[:, :, :-1] # img = np.roll(img, shift=1, axis=2) #", "weight_suffixes[0])) weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS)) i =", "weight_suffixes: weights_channel = np.load(os.path.join(img_path, img_name + suffix)) if len(weights_channel.shape) ==", "_ in os.listdir(path) if _.endswith(image_format)] total_count += len(path_list) except OSError:", "= np.load(os.path.join(img_path, img_name + suffix)) if len(weights_channel.shape) == 2: weights_channel", "path in paths: try: path_list = [_ for _ in", "i = 0 for suffix in weight_suffixes: weights_channel = np.load(os.path.join(img_path,", "imread def get_file_count(paths, image_format='.tif'): total_count = 0 for path in", "print(msk, msk.shape) return msk def load_weights(cfg, img_path, img_name, weight_suffixes): a_weights", "= np.expand_dims(weights_channel, axis=-1) if len(weights_channel.shape) != 3: raise ValueError(\"Weights must", "= msk_channel i += cfg.NUMBER_MSK_CHANNELS # print(msk, msk.shape) return msk", "+ suffix)) if len(weights_channel.shape) == 2: weights_channel = np.expand_dims(weights_channel, axis=-1)", "def load_mask_from_img(cfg, img_path, img_name, suffixes): a_mask = imread(os.path.join(img_path, img_name +", "os.listdir(path) if _.endswith(image_format)] total_count += len(path_list) except OSError: print(\"Directory does", "to load mask def load_mask(mask_path): mask = imread(mask_path) return mask", "count for this path will be 0\") return total_count #", "# 
img = np.roll(img, shift=1, axis=2) # CHECK IMAGE FORMAT", "msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel i += cfg.NUMBER_MSK_CHANNELS # print(msk,", ":-1] # img = np.roll(img, shift=1, axis=2) # CHECK IMAGE", "np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0 for suffix", "must be 3-dim here. Has your weights 1 or more", "cfg.NUMBER_MSK_CHANNELS # print(msk, msk.shape) return msk def load_weights(cfg, img_path, img_name,", "img_name, suffixes): a_mask = imread(os.path.join(img_path, img_name + suffixes[0])) msk =", "import numpy as np from skimage.io import imread def get_file_count(paths,", "msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0", "file count for this path will be 0\") return total_count", "the masks.\") msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel i += cfg.NUMBER_MSK_CHANNELS", "np.load(os.path.join(img_path, img_name + weight_suffixes[0])) weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) *", "numpy as np from skimage.io import imread def get_file_count(paths, image_format='.tif'):", "axis=2) # CHECK IMAGE FORMAT return img # Function to", "== 2: weights_channel = np.expand_dims(weights_channel, axis=-1) if len(weights_channel.shape) != 3:", "np from skimage.io import imread def get_file_count(paths, image_format='.tif'): total_count =", "= imread(os.path.join(img_path, img_name + suffixes[0])) msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes)", "len(weights_channel.shape) != 3: raise ValueError(\"Weights must be 3-dim here. Has", "3: raise ValueError(\"Mask must be 3-dim here. 
Does your mask", "def load_image(img_path): img = imread(img_path) if img.shape[2] == 4: img", "+= cfg.NUMBER_MSK_CHANNELS # print(msk, msk.shape) return msk def load_weights(cfg, img_path,", "load_mask(mask_path): mask = imread(mask_path) return mask def load_mask_from_img(cfg, img_path, img_name,", "skimage.io import imread def get_file_count(paths, image_format='.tif'): total_count = 0 for", "return total_count # Function to load image def load_image(img_path): img", "mask = imread(mask_path) return mask def load_mask_from_img(cfg, img_path, img_name, suffixes):", "suffix in weight_suffixes: weights_channel = np.load(os.path.join(img_path, img_name + suffix)) if", "2: msk_channel = np.expand_dims(msk_channel, axis=-1) if len(msk_channel.shape) != 3: raise", "Has your weights 1 or more than 3 dimensions? Check", "here. Does your mask have 1 or more than 3", "in paths: try: path_list = [_ for _ in os.listdir(path)", "except OSError: print(\"Directory does not exist. Returned file count for", "msk def load_weights(cfg, img_path, img_name, weight_suffixes): a_weights = np.load(os.path.join(img_path, img_name", "img_name + suffix)) if len(weights_channel.shape) == 2: weights_channel = np.expand_dims(weights_channel,", "1 or more than 3 dimensions? Check the weights.\") weights[:,", "!= 3: raise ValueError(\"Mask must be 3-dim here. Does your", "msk.shape) return msk def load_weights(cfg, img_path, img_name, weight_suffixes): a_weights =", "imread(os.path.join(img_path, img_name + suffix)) if len(msk_channel.shape) == 2: msk_channel =", "your mask have 1 or more than 3 dimensions? \"", "be 0\") return total_count # Function to load image def", "more than 3 dimensions? Check the weights.\") weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS]", "weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0", "3 dimensions? 
\" \"Check the masks.\") msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] =", "imread(mask_path) return mask def load_mask_from_img(cfg, img_path, img_name, suffixes): a_mask =", "print(\"Directory does not exist. Returned file count for this path", "Check the weights.\") weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = weights_channel i +=", "OSError: print(\"Directory does not exist. Returned file count for this", "3-dim here. Does your mask have 1 or more than", "= np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS)) i = 0 for", "3-dim here. Has your weights 1 or more than 3", "in os.listdir(path) if _.endswith(image_format)] total_count += len(path_list) except OSError: print(\"Directory", ":, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel i += cfg.NUMBER_MSK_CHANNELS # print(msk, msk.shape)", "total_count # Function to load image def load_image(img_path): img =" ]
[ "default=None, as_series=False, inplace: bool = False, ) -> Optional[Native]: values", "b: a and not b, whitelist, ) return self.map_zip_values( lambda", "else: yield self.value_series().get_items_no(sliding_window, extend=extend, default=default) def apply_window_func( self, function: Callable,", "window_len=3, exclude_center=False): center = int((window_len - 1) / 2) count", "= self.get_mean() return self.map_values(lambda v: v / rate if rate", "return result def smooth(self, how='linear', *args, **kwargs): method_name = 'smooth_{}'.format(how)", ") def subtract(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x,", "get_sum(self): return sum( self.filter_values_defined().get_values(), ) def get_mean(self): values_defined = self.filter_values_defined().get_values()", "return self.slice(0, -1).subtract( self.shift(-1) ) def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None,", "in a package. from series import series_classes as sc from", "values = map(function, self.get_sliding_window(window, extend=extend, default=default, as_series=as_series)) return self.set_values(values, inplace=inplace)", "= self.subtract(smoothed_series) if rel: deviation = deviation.divide(smoothed_series, default=0) return deviation", "n for n in window] if as_series: yield self.value_series().items_no(sliding_window, extend=extend,", "self.filter_values_defined().get_values() if values_defined: return sum(values_defined) / len(values_defined) def norm(self, rate=None,", "return self.apply_window_func( lambda a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max), window=WINDOW_DEFAULT, extend=True, default=False,", "+ center + 1) if exclude_center: sub_series = sub_series.drop_item_no(center) result.append(sub_series.get_mean(),", "y else default, extend, series, ) def subtract(self, series, default=None,", "return DEFAULT_NUMERIC def get_sum(self): return sum( self.filter_values_defined().get_values(), 
) def get_mean(self):", "return sum(values_defined) / len(values_defined) def norm(self, rate=None, default=None): if rate", "window=WINDOW_NEIGHBORS, rel=False): smoothed_series = self.smooth(window=window) deviation = self.subtract(smoothed_series) if rel:", "inplace=inplace) def mark_local_extremums(self, local_min=True, local_max=True): return self.apply_window_func( lambda a: nm.is_local_extremum(*a,", "DEFAULT_NUMERIC def get_sum(self): return sum( self.filter_values_defined().get_values(), ) def get_mean(self): values_defined", "in self.get_values(): if not isinstance(v, (int, float)): return False return", "s: s if t else v, spikes, self.smooth(window=window), ) def", "default=False, ) def mark_local_max(self): return self.mark_local_extremums(local_min=False, local_max=True) def mark_local_min(self): return", "local_max=False) def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False): smoothed_series = self.smooth(window=window) deviation =", "else: return self.slice(0, -1).subtract( self.shift(-1) ) def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True,", "-1).subtract( self.shift(-1) ) def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True): if", "yield self.value_series().items_no(sliding_window, extend=extend, default=default) else: yield self.value_series().get_items_no(sliding_window, extend=extend, default=default) def", "if values_defined: return sum(values_defined) / len(values_defined) def norm(self, rate=None, default=None):", "smooth(self, how='linear', *args, **kwargs): method_name = 'smooth_{}'.format(how) smooth_method = self.__getattribute__(method_name)", "import series_classes as sc from ...utils import numeric as nm", "super().__init__( values=values, validate=validate, name=name, ) @staticmethod def get_distance_func(): return nm.diff", "self.get_mean() return self.map_values(lambda v: v / rate if rate else", "default=0) return deviation # @deprecated def 
smooth_simple_linear(self, window_len=3, exclude_center=False): center", "self.get_range_numbers(): is_edge = n < center or n >= count", "package. from series import series_classes as sc from utils import", "self.shift(-1) ) def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True): if extend:", "imported, fall back to a local import. from .. import", "return self.map_values(lambda v: v / rate if rate else default)", "default=default, ).crop(0, 1) else: return self.slice(0, -1).subtract( self.shift(-1) ) def", "self.has_valid_items() else: return DEFAULT_NUMERIC def get_sum(self): return sum( self.filter_values_defined().get_values(), )", "if rate else default) def divide(self, series, default=None, extend=False): return", "- 1) / 2) count = self.get_count() result = self.new()", "[center + n for n in window] if as_series: yield", "lambda x, y: x - y if x is not", "rel: deviation = deviation.divide(smoothed_series, default=0) return deviation # @deprecated def", "return smooth_method(*args, **kwargs) def smooth_multiple(self, list_kwargs=[]): series = self for", "None else default, extend, series, ) def derivative(self, extend=False, default=0):", "extend, series, ) def subtract(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values(", "int((window_len - 1) / 2) count = self.get_count() result =", "): super().__init__( values=values, validate=validate, name=name, ) @staticmethod def get_distance_func(): return", "self.map_zip_values( lambda v, t, s: s if t else v,", "return self.map_zip_values( lambda v, t, s: s if t else", "False, ) -> Optional[Native]: values = map(function, self.get_sliding_window(window, extend=extend, default=default,", "validate=validate, name=name, ) @staticmethod def get_distance_func(): return nm.diff def get_errors(self):", "self.get_count() result = self.new() for n in self.get_range_numbers(): is_edge =", "return self.has_valid_items() else: return DEFAULT_NUMERIC def 
get_sum(self): return sum( self.filter_values_defined().get_values(),", "window=WINDOW_DEFAULT, extend=True, default=None, as_series=True): if extend: n_min = 0 n_max", "Assume we're a sub-module in a package. from series import", "local_min or local_max: deviation = deviation.map_zip_values( lambda x, m: x", "in self.get_range_numbers(): is_edge = n < center or n >=", "def get_errors(self): yield from super().get_errors() if not self.has_valid_items(): yield 'Values", "= True WINDOW_DEFAULT = (-1, 0, 1) WINDOW_WO_CENTER = (-2,", "v, spikes, self.smooth(window=window), ) def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True):", "name=name, ) @staticmethod def get_distance_func(): return nm.diff def get_errors(self): yield", "self.apply_window_func( lambda s: s.get_mean(), window=window, extend=True, default=None, as_series=True, ) def", "series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x, y: x /", "import series_classes as sc from utils import numeric as nm", "numeric'.format(self.get_class_name()) def has_valid_items(self): for v in self.get_values(): if not isinstance(v,", "True def is_numeric(self, check=False): if check: return self.has_valid_items() else: return", "n_min = 0 n_max = self.get_count() else: n_min = -", "default) def divide(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x,", "from utils import numeric as nm except ImportError: # Apparently", "return self.map_optionally_extend_zip_values( lambda x, y: x - y if x", "self.filter_values_defined().get_values(), ) def get_mean(self): values_defined = self.filter_values_defined().get_values() if values_defined: return", "= self.get_count() else: n_min = - min(window) n_max = self.get_count()", "sum( self.filter_values_defined().get_values(), ) def get_mean(self): values_defined = self.filter_values_defined().get_values() if values_defined:", "lambda a, b: a and not b, whitelist, 
) return", "+ 1) if exclude_center: sub_series = sub_series.drop_item_no(center) result.append(sub_series.get_mean(), inplace=True) return", "is None: rate = self.get_mean() return self.map_values(lambda v: v /", "lambda v, t, s: s if t else v, spikes,", "as_series=True): if extend: n_min = 0 n_max = self.get_count() else:", "kwargs in list_kwargs: series = series.smooth(**kwargs) return series def smooth_linear(self,", "sub_series.drop_item_no(center) result.append(sub_series.get_mean(), inplace=True) return result def smooth(self, how='linear', *args, **kwargs):", "not isinstance(v, (int, float)): return False return True def is_numeric(self,", "is_numeric(self, check=False): if check: return self.has_valid_items() else: return DEFAULT_NUMERIC def", "extend=False): return self.map_optionally_extend_zip_values( lambda x, y: x / y if", "or local_max: deviation = deviation.map_zip_values( lambda x, m: x if", "back to a local import. from .. import series_classes as", "whitelist, ) return self.map_zip_values( lambda v, t, s: s if", "extend: n_min = 0 n_max = self.get_count() else: n_min =", "if as_series: yield self.value_series().items_no(sliding_window, extend=extend, default=default) else: yield self.value_series().get_items_no(sliding_window, extend=extend,", "get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True): if extend: n_min = 0", "default=None): if rate is None: rate = self.get_mean() return self.map_values(lambda", "series import series_classes as sc from utils import numeric as", "# @deprecated def smooth_simple_linear(self, window_len=3, exclude_center=False): center = int((window_len -", "self, function: Callable, window=WINDOW_DEFAULT, extend=True, default=None, as_series=False, inplace: bool =", "return self.set_values(values, inplace=inplace) def mark_local_extremums(self, local_min=True, local_max=True): return self.apply_window_func( lambda", "self.subtract(smoothed_series) if rel: deviation = 
deviation.divide(smoothed_series, default=0) return deviation #", "(-1, 0, 1) WINDOW_WO_CENTER = (-2, -1, 0, 1, 2)", "list_kwargs: series = series.smooth(**kwargs) return series def smooth_linear(self, window=WINDOW_DEFAULT): return", "deviation = self.deviation_from_neighbors(window=window, rel=True) if local_min or local_max: deviation =", "WINDOW_NEIGHBORS = (-1, 0) class NumericSeries(sc.AnySeries): def __init__( self, values=[],", ") def mark_local_max(self): return self.mark_local_extremums(local_min=False, local_max=True) def mark_local_min(self): return self.mark_local_extremums(local_min=True,", "import. from .. import series_classes as sc from ...utils import", "deviation = self.subtract(smoothed_series) if rel: deviation = deviation.divide(smoothed_series, default=0) return", "return series def smooth_linear(self, window=WINDOW_DEFAULT): return self.apply_window_func( lambda s: s.get_mean(),", "local_max=local_max), window=WINDOW_DEFAULT, extend=True, default=False, ) def mark_local_max(self): return self.mark_local_extremums(local_min=False, local_max=True)", "rate=None, default=None): if rate is None: rate = self.get_mean() return", "window=window, extend=True, default=None, as_series=True, ) def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False,", "except ImportError: # Apparently no higher-level package has been imported,", "x, y: x / y if y else default, extend,", "as_series=True, ) def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None): spikes", "series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x, y: x -", "None, self.mark_local_extremums(local_min=local_min, local_max=local_max), ) spikes = deviation.map_values( lambda x: abs(x", "/ rate if rate else default) def divide(self, series, default=None,", "to a local import. from .. 
import series_classes as sc", "-> Optional[Native]: values = map(function, self.get_sliding_window(window, extend=extend, default=default, as_series=as_series)) return", "t else v, spikes, self.smooth(window=window), ) def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS,", "series.smooth(**kwargs) return series def smooth_linear(self, window=WINDOW_DEFAULT): return self.apply_window_func( lambda s:", "class NumericSeries(sc.AnySeries): def __init__( self, values=[], validate=False, name=None, ): super().__init__(", "a and not b, whitelist, ) return self.map_zip_values( lambda v,", "self.get_sliding_window(window, extend=extend, default=default, as_series=as_series)) return self.set_values(values, inplace=inplace) def mark_local_extremums(self, local_min=True,", "if whitelist: spikes = spikes.map_zip_values( lambda a, b: a and", "extend=True, default=default, ).crop(0, 1) else: return self.slice(0, -1).subtract( self.shift(-1) )", ") def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True): deviation = self.deviation_from_neighbors(window=window,", "not None and y is not None else default, extend,", "def __init__( self, values=[], validate=False, name=None, ): super().__init__( values=values, validate=validate,", "smooth_multiple(self, list_kwargs=[]): series = self for kwargs in list_kwargs: series", "*args, **kwargs): method_name = 'smooth_{}'.format(how) smooth_method = self.__getattribute__(method_name) return smooth_method(*args,", ") def derivative(self, extend=False, default=0): if extend: return self.preface(None).subtract( self,", "else default) def divide(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda", "numeric as nm except ImportError: # Apparently no higher-level package", "smooth_simple_linear(self, window_len=3, exclude_center=False): center = int((window_len - 1) / 2)", "extend=extend, default=default, as_series=as_series)) return self.set_values(values, 
inplace=inplace) def mark_local_extremums(self, local_min=True, local_max=True):", "n_min = - min(window) n_max = self.get_count() - max(window) for", "**kwargs): method_name = 'smooth_{}'.format(how) smooth_method = self.__getattribute__(method_name) return smooth_method(*args, **kwargs)", "exclude_center: sub_series = sub_series.drop_item_no(center) result.append(sub_series.get_mean(), inplace=True) return result def smooth(self,", "smooth_linear(self, window=WINDOW_DEFAULT): return self.apply_window_func( lambda s: s.get_mean(), window=window, extend=True, default=None,", "/ y if y else default, extend, series, ) def", "= sub_series.drop_item_no(center) result.append(sub_series.get_mean(), inplace=True) return result def smooth(self, how='linear', *args,", "= n < center or n >= count - center", "norm(self, rate=None, default=None): if rate is None: rate = self.get_mean()", "'smooth_{}'.format(how) smooth_method = self.__getattribute__(method_name) return smooth_method(*args, **kwargs) def smooth_multiple(self, list_kwargs=[]):", "= self.mark_spikes(threshold, local_min=local_min, local_max=local_max) if whitelist: spikes = spikes.map_zip_values( lambda", "in list_kwargs: series = series.smooth(**kwargs) return series def smooth_linear(self, window=WINDOW_DEFAULT):", "rate = self.get_mean() return self.map_values(lambda v: v / rate if", "0) > threshold, ) return spikes def plot(self, fmt='-'): nm.plot(self.get_range_numbers(),", "smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None): spikes = self.mark_spikes(threshold, local_min=local_min,", "def mark_local_max(self): return self.mark_local_extremums(local_min=False, local_max=True) def mark_local_min(self): return self.mark_local_extremums(local_min=True, local_max=False)", "sc.AnySeries DEFAULT_NUMERIC = True WINDOW_DEFAULT = (-1, 0, 1) WINDOW_WO_CENTER", "def is_numeric(self, check=False): if check: return self.has_valid_items() else: return DEFAULT_NUMERIC", 
"isinstance(v, (int, float)): return False return True def is_numeric(self, check=False):", "been imported, fall back to a local import. from ..", ") return self.map_zip_values( lambda v, t, s: s if t", "= deviation.map_zip_values( lambda x, m: x if m else None,", "if is_edge: result.append(self.get_item_no(n), inplace=True) else: sub_series = self.slice(n - center,", "default=default, as_series=as_series)) return self.set_values(values, inplace=inplace) def mark_local_extremums(self, local_min=True, local_max=True): return", "t, s: s if t else v, spikes, self.smooth(window=window), )", "extend=False, default=0): if extend: return self.preface(None).subtract( self, extend=True, default=default, ).crop(0,", "v in self.get_values(): if not isinstance(v, (int, float)): return False", "WINDOW_DEFAULT = (-1, 0, 1) WINDOW_WO_CENTER = (-2, -1, 0,", "= series.smooth(**kwargs) return series def smooth_linear(self, window=WINDOW_DEFAULT): return self.apply_window_func( lambda", "lambda x: abs(x or 0) > threshold, ) return spikes", "def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True): deviation = self.deviation_from_neighbors(window=window, rel=True)", "local_max: deviation = deviation.map_zip_values( lambda x, m: x if m", "derivative(self, extend=False, default=0): if extend: return self.preface(None).subtract( self, extend=True, default=default,", "def smooth(self, how='linear', *args, **kwargs): method_name = 'smooth_{}'.format(how) smooth_method =", "x if m else None, self.mark_local_extremums(local_min=local_min, local_max=local_max), ) spikes =", "def apply_window_func( self, function: Callable, window=WINDOW_DEFAULT, extend=True, default=None, as_series=False, inplace:", "n < center or n >= count - center if", "x is not None and y is not None else", "0 n_max = self.get_count() else: n_min = - min(window) n_max", "lambda s: s.get_mean(), window=window, extend=True, default=None, as_series=True, ) def smooth_spikes(self,", "Native = 
sc.AnySeries DEFAULT_NUMERIC = True WINDOW_DEFAULT = (-1, 0,", "y is not None else default, extend, series, ) def", "self.value_series().get_items_no(sliding_window, extend=extend, default=default) def apply_window_func( self, function: Callable, window=WINDOW_DEFAULT, extend=True,", "self.mark_spikes(threshold, local_min=local_min, local_max=local_max) if whitelist: spikes = spikes.map_zip_values( lambda a,", "v / rate if rate else default) def divide(self, series,", "map(function, self.get_sliding_window(window, extend=extend, default=default, as_series=as_series)) return self.set_values(values, inplace=inplace) def mark_local_extremums(self,", "return False return True def is_numeric(self, check=False): if check: return", "else: sub_series = self.slice(n - center, n + center +", "extend=True, default=None, as_series=True, ) def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True,", "return sum( self.filter_values_defined().get_values(), ) def get_mean(self): values_defined = self.filter_values_defined().get_values() if", ">= count - center if is_edge: result.append(self.get_item_no(n), inplace=True) else: sub_series", ") spikes = deviation.map_values( lambda x: abs(x or 0) >", "return self.map_optionally_extend_zip_values( lambda x, y: x / y if y", "in window] if as_series: yield self.value_series().items_no(sliding_window, extend=extend, default=default) else: yield", "local_max=local_max), ) spikes = deviation.map_values( lambda x: abs(x or 0)", "nm except ImportError: # Apparently no higher-level package has been", "self.set_values(values, inplace=inplace) def mark_local_extremums(self, local_min=True, local_max=True): return self.apply_window_func( lambda a:", "for n in window] if as_series: yield self.value_series().items_no(sliding_window, extend=extend, default=default)", "- y if x is not None and y is", "sub_series = self.slice(n - center, n + center + 1)", "n >= count - center if is_edge: 
result.append(self.get_item_no(n), inplace=True) else:", "center, n + center + 1) if exclude_center: sub_series =", "# Apparently no higher-level package has been imported, fall back", "else: return DEFAULT_NUMERIC def get_sum(self): return sum( self.filter_values_defined().get_values(), ) def", "window=WINDOW_NEIGHBORS, local_min=False, local_max=True): deviation = self.deviation_from_neighbors(window=window, rel=True) if local_min or", "center + 1) if exclude_center: sub_series = sub_series.drop_item_no(center) result.append(sub_series.get_mean(), inplace=True)", "m else None, self.mark_local_extremums(local_min=local_min, local_max=local_max), ) spikes = deviation.map_values( lambda", "None: rate = self.get_mean() return self.map_values(lambda v: v / rate", "or n >= count - center if is_edge: result.append(self.get_item_no(n), inplace=True)", "yield self.value_series().get_items_no(sliding_window, extend=extend, default=default) def apply_window_func( self, function: Callable, window=WINDOW_DEFAULT,", "x: abs(x or 0) > threshold, ) return spikes def", "if rate is None: rate = self.get_mean() return self.map_values(lambda v:", "self.smooth(window=window) deviation = self.subtract(smoothed_series) if rel: deviation = deviation.divide(smoothed_series, default=0)", "nm Native = sc.AnySeries DEFAULT_NUMERIC = True WINDOW_DEFAULT = (-1,", "Apparently no higher-level package has been imported, fall back to", "ImportError: # Apparently no higher-level package has been imported, fall", "values=[], validate=False, name=None, ): super().__init__( values=values, validate=validate, name=name, ) @staticmethod", "< center or n >= count - center if is_edge:", "has_valid_items(self): for v in self.get_values(): if not isinstance(v, (int, float)):", "self.mark_local_extremums(local_min=True, local_max=False) def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False): smoothed_series = self.smooth(window=window) deviation", "import Optional, Callable try: # Assume 
we're a sub-module in", "default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x, y: x / y", "local_max=True): return self.apply_window_func( lambda a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max), window=WINDOW_DEFAULT, extend=True,", "sc from utils import numeric as nm except ImportError: #", "if local_min or local_max: deviation = deviation.map_zip_values( lambda x, m:", "as nm except ImportError: # Apparently no higher-level package has", "(-1, 0) class NumericSeries(sc.AnySeries): def __init__( self, values=[], validate=False, name=None,", "2) WINDOW_NEIGHBORS = (-1, 0) class NumericSeries(sc.AnySeries): def __init__( self,", "extend, series, ) def derivative(self, extend=False, default=0): if extend: return", "if rel: deviation = deviation.divide(smoothed_series, default=0) return deviation # @deprecated", "as nm Native = sc.AnySeries DEFAULT_NUMERIC = True WINDOW_DEFAULT =", "nm.diff def get_errors(self): yield from super().get_errors() if not self.has_valid_items(): yield", "/ len(values_defined) def norm(self, rate=None, default=None): if rate is None:", "subtract(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x, y: x", "= self.slice(n - center, n + center + 1) if", "get_distance_func(): return nm.diff def get_errors(self): yield from super().get_errors() if not", "for v in self.get_values(): if not isinstance(v, (int, float)): return", "= (-1, 0) class NumericSeries(sc.AnySeries): def __init__( self, values=[], validate=False,", "higher-level package has been imported, fall back to a local", "from ...utils import numeric as nm Native = sc.AnySeries DEFAULT_NUMERIC", "deviation.map_zip_values( lambda x, m: x if m else None, self.mark_local_extremums(local_min=local_min,", "if extend: n_min = 0 n_max = self.get_count() else: n_min", "default, extend, series, ) def subtract(self, series, default=None, extend=False): return", "smooth_method = 
self.__getattribute__(method_name) return smooth_method(*args, **kwargs) def smooth_multiple(self, list_kwargs=[]): series", "if extend: return self.preface(None).subtract( self, extend=True, default=default, ).crop(0, 1) else:", "def smooth_multiple(self, list_kwargs=[]): series = self for kwargs in list_kwargs:", "lambda x, y: x / y if y else default,", "get_mean(self): values_defined = self.filter_values_defined().get_values() if values_defined: return sum(values_defined) / len(values_defined)", ") @staticmethod def get_distance_func(): return nm.diff def get_errors(self): yield from", "no higher-level package has been imported, fall back to a", "values_defined: return sum(values_defined) / len(values_defined) def norm(self, rate=None, default=None): if", "extend=True, default=False, ) def mark_local_max(self): return self.mark_local_extremums(local_min=False, local_max=True) def mark_local_min(self):", "...utils import numeric as nm Native = sc.AnySeries DEFAULT_NUMERIC =", "sub-module in a package. from series import series_classes as sc", "deviation.map_values( lambda x: abs(x or 0) > threshold, ) return", "default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x, y: x - y", "is_edge: result.append(self.get_item_no(n), inplace=True) else: sub_series = self.slice(n - center, n", "extend=True, default=None, as_series=True): if extend: n_min = 0 n_max =", "default=default) def apply_window_func( self, function: Callable, window=WINDOW_DEFAULT, extend=True, default=None, as_series=False,", "a sub-module in a package. 
from series import series_classes as", "local_max=True) def mark_local_min(self): return self.mark_local_extremums(local_min=True, local_max=False) def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False):", "whitelist: spikes = spikes.map_zip_values( lambda a, b: a and not", "y: x / y if y else default, extend, series,", "deviation.divide(smoothed_series, default=0) return deviation # @deprecated def smooth_simple_linear(self, window_len=3, exclude_center=False):", "sub_series = sub_series.drop_item_no(center) result.append(sub_series.get_mean(), inplace=True) return result def smooth(self, how='linear',", "validate=False, name=None, ): super().__init__( values=values, validate=validate, name=name, ) @staticmethod def", "numeric as nm Native = sc.AnySeries DEFAULT_NUMERIC = True WINDOW_DEFAULT", "a local import. from .. import series_classes as sc from", "spikes = self.mark_spikes(threshold, local_min=local_min, local_max=local_max) if whitelist: spikes = spikes.map_zip_values(", "1) / 2) count = self.get_count() result = self.new() for", "def has_valid_items(self): for v in self.get_values(): if not isinstance(v, (int,", "def mark_local_min(self): return self.mark_local_extremums(local_min=True, local_max=False) def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False): smoothed_series", "self.new() for n in self.get_range_numbers(): is_edge = n < center", "from typing import Optional, Callable try: # Assume we're a", "= self.filter_values_defined().get_values() if values_defined: return sum(values_defined) / len(values_defined) def norm(self,", "get_errors(self): yield from super().get_errors() if not self.has_valid_items(): yield 'Values of", "center = int((window_len - 1) / 2) count = self.get_count()", "or 0) > threshold, ) return spikes def plot(self, fmt='-'):", "self.slice(0, -1).subtract( self.shift(-1) ) def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True):", "list_kwargs=[]): series = 
self for kwargs in list_kwargs: series =", "super().get_errors() if not self.has_valid_items(): yield 'Values of {} must be", "Optional[Native]: values = map(function, self.get_sliding_window(window, extend=extend, default=default, as_series=as_series)) return self.set_values(values,", "be numeric'.format(self.get_class_name()) def has_valid_items(self): for v in self.get_values(): if not", "as sc from ...utils import numeric as nm Native =", "local_max=local_max) if whitelist: spikes = spikes.map_zip_values( lambda a, b: a", "= sc.AnySeries DEFAULT_NUMERIC = True WINDOW_DEFAULT = (-1, 0, 1)", "mark_local_min(self): return self.mark_local_extremums(local_min=True, local_max=False) def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False): smoothed_series =", "= deviation.divide(smoothed_series, default=0) return deviation # @deprecated def smooth_simple_linear(self, window_len=3,", "def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False): smoothed_series = self.smooth(window=window) deviation = self.subtract(smoothed_series)", "and not b, whitelist, ) return self.map_zip_values( lambda v, t,", "b, whitelist, ) return self.map_zip_values( lambda v, t, s: s", "not b, whitelist, ) return self.map_zip_values( lambda v, t, s:", "series = series.smooth(**kwargs) return series def smooth_linear(self, window=WINDOW_DEFAULT): return self.apply_window_func(", "WINDOW_WO_CENTER = (-2, -1, 0, 1, 2) WINDOW_NEIGHBORS = (-1,", "self.apply_window_func( lambda a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max), window=WINDOW_DEFAULT, extend=True, default=False, )", "extend=False): return self.map_optionally_extend_zip_values( lambda x, y: x - y if", "rate if rate else default) def divide(self, series, default=None, extend=False):", "extend=True, default=None, as_series=False, inplace: bool = False, ) -> Optional[Native]:", "(int, float)): return False return True def is_numeric(self, check=False): if", "y if x is not None and y is 
not", "if not self.has_valid_items(): yield 'Values of {} must be numeric'.format(self.get_class_name())", "import numeric as nm except ImportError: # Apparently no higher-level", "window=WINDOW_DEFAULT, extend=True, default=False, ) def mark_local_max(self): return self.mark_local_extremums(local_min=False, local_max=True) def", "return self.mark_local_extremums(local_min=False, local_max=True) def mark_local_min(self): return self.mark_local_extremums(local_min=True, local_max=False) def deviation_from_neighbors(self,", "= int((window_len - 1) / 2) count = self.get_count() result", "a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max), window=WINDOW_DEFAULT, extend=True, default=False, ) def mark_local_max(self):", "if m else None, self.mark_local_extremums(local_min=local_min, local_max=local_max), ) spikes = deviation.map_values(", "yield from super().get_errors() if not self.has_valid_items(): yield 'Values of {}", "is_edge = n < center or n >= count -", "center if is_edge: result.append(self.get_item_no(n), inplace=True) else: sub_series = self.slice(n -", "and y is not None else default, extend, series, )", "self.get_count() else: n_min = - min(window) n_max = self.get_count() -", "result.append(sub_series.get_mean(), inplace=True) return result def smooth(self, how='linear', *args, **kwargs): method_name", "x / y if y else default, extend, series, )", "spikes.map_zip_values( lambda a, b: a and not b, whitelist, )", "True WINDOW_DEFAULT = (-1, 0, 1) WINDOW_WO_CENTER = (-2, -1,", "rel=True) if local_min or local_max: deviation = deviation.map_zip_values( lambda x,", "1) if exclude_center: sub_series = sub_series.drop_item_no(center) result.append(sub_series.get_mean(), inplace=True) return result", "n_max = self.get_count() else: n_min = - min(window) n_max =", "DEFAULT_NUMERIC = True WINDOW_DEFAULT = (-1, 0, 1) WINDOW_WO_CENTER =", "mark_local_extremums(self, local_min=True, local_max=True): return self.apply_window_func( lambda a: 
nm.is_local_extremum(*a, local_min=local_min, local_max=local_max),", "if not isinstance(v, (int, float)): return False return True def", "self.get_values(): if not isinstance(v, (int, float)): return False return True", "mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True): deviation = self.deviation_from_neighbors(window=window, rel=True) if", ") def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True): if extend: n_min", "= spikes.map_zip_values( lambda a, b: a and not b, whitelist,", "name=None, ): super().__init__( values=values, validate=validate, name=name, ) @staticmethod def get_distance_func():", "len(values_defined) def norm(self, rate=None, default=None): if rate is None: rate", "@deprecated def smooth_simple_linear(self, window_len=3, exclude_center=False): center = int((window_len - 1)", "inplace: bool = False, ) -> Optional[Native]: values = map(function,", "s.get_mean(), window=window, extend=True, default=None, as_series=True, ) def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER,", "self.map_optionally_extend_zip_values( lambda x, y: x / y if y else", "extend: return self.preface(None).subtract( self, extend=True, default=default, ).crop(0, 1) else: return", "n_max): sliding_window = [center + n for n in window]", "must be numeric'.format(self.get_class_name()) def has_valid_items(self): for v in self.get_values(): if", "y if y else default, extend, series, ) def subtract(self,", "0, 1, 2) WINDOW_NEIGHBORS = (-1, 0) class NumericSeries(sc.AnySeries): def", "(-2, -1, 0, 1, 2) WINDOW_NEIGHBORS = (-1, 0) class", "= (-2, -1, 0, 1, 2) WINDOW_NEIGHBORS = (-1, 0)", "in range(n_min, n_max): sliding_window = [center + n for n", "- max(window) for center in range(n_min, n_max): sliding_window = [center", "deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False): smoothed_series = self.smooth(window=window) deviation = self.subtract(smoothed_series) if", "= 
self.smooth(window=window) deviation = self.subtract(smoothed_series) if rel: deviation = deviation.divide(smoothed_series,", "- center, n + center + 1) if exclude_center: sub_series", "n in window] if as_series: yield self.value_series().items_no(sliding_window, extend=extend, default=default) else:", "threshold, ) return spikes def plot(self, fmt='-'): nm.plot(self.get_range_numbers(), self.get_values(), fmt=fmt)", "self.map_optionally_extend_zip_values( lambda x, y: x - y if x is", "- min(window) n_max = self.get_count() - max(window) for center in", "center in range(n_min, n_max): sliding_window = [center + n for", "method_name = 'smooth_{}'.format(how) smooth_method = self.__getattribute__(method_name) return smooth_method(*args, **kwargs) def", "is not None and y is not None else default,", "= self.new() for n in self.get_range_numbers(): is_edge = n <", "= (-1, 0, 1) WINDOW_WO_CENTER = (-2, -1, 0, 1,", "x - y if x is not None and y", "if check: return self.has_valid_items() else: return DEFAULT_NUMERIC def get_sum(self): return", "import numeric as nm Native = sc.AnySeries DEFAULT_NUMERIC = True", "function: Callable, window=WINDOW_DEFAULT, extend=True, default=None, as_series=False, inplace: bool = False,", "= self.get_count() - max(window) for center in range(n_min, n_max): sliding_window", "local_min=local_min, local_max=local_max), window=WINDOW_DEFAULT, extend=True, default=False, ) def mark_local_max(self): return self.mark_local_extremums(local_min=False,", "# Assume we're a sub-module in a package. 
from series", "inplace=True) return result def smooth(self, how='linear', *args, **kwargs): method_name =", "as_series=as_series)) return self.set_values(values, inplace=inplace) def mark_local_extremums(self, local_min=True, local_max=True): return self.apply_window_func(", "self.mark_local_extremums(local_min=local_min, local_max=local_max), ) spikes = deviation.map_values( lambda x: abs(x or", "def smooth_linear(self, window=WINDOW_DEFAULT): return self.apply_window_func( lambda s: s.get_mean(), window=window, extend=True,", "from super().get_errors() if not self.has_valid_items(): yield 'Values of {} must", "divide(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x, y: x", "smoothed_series = self.smooth(window=window) deviation = self.subtract(smoothed_series) if rel: deviation =", "typing import Optional, Callable try: # Assume we're a sub-module", "= 'smooth_{}'.format(how) smooth_method = self.__getattribute__(method_name) return smooth_method(*args, **kwargs) def smooth_multiple(self,", "rate is None: rate = self.get_mean() return self.map_values(lambda v: v", "a, b: a and not b, whitelist, ) return self.map_zip_values(", "try: # Assume we're a sub-module in a package. from", "= self.get_count() result = self.new() for n in self.get_range_numbers(): is_edge", "deviation # @deprecated def smooth_simple_linear(self, window_len=3, exclude_center=False): center = int((window_len", "return True def is_numeric(self, check=False): if check: return self.has_valid_items() else:", "local import. from .. 
import series_classes as sc from ...utils", "default=default) else: yield self.value_series().get_items_no(sliding_window, extend=extend, default=default) def apply_window_func( self, function:", "def mark_local_extremums(self, local_min=True, local_max=True): return self.apply_window_func( lambda a: nm.is_local_extremum(*a, local_min=local_min,", "self.has_valid_items(): yield 'Values of {} must be numeric'.format(self.get_class_name()) def has_valid_items(self):", "self.preface(None).subtract( self, extend=True, default=default, ).crop(0, 1) else: return self.slice(0, -1).subtract(", "return self.apply_window_func( lambda s: s.get_mean(), window=window, extend=True, default=None, as_series=True, )", "as_series: yield self.value_series().items_no(sliding_window, extend=extend, default=default) else: yield self.value_series().get_items_no(sliding_window, extend=extend, default=default)", "m: x if m else None, self.mark_local_extremums(local_min=local_min, local_max=local_max), ) spikes", "extend=extend, default=default) def apply_window_func( self, function: Callable, window=WINDOW_DEFAULT, extend=True, default=None,", "not None else default, extend, series, ) def derivative(self, extend=False,", "lambda a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max), window=WINDOW_DEFAULT, extend=True, default=False, ) def", "if exclude_center: sub_series = sub_series.drop_item_no(center) result.append(sub_series.get_mean(), inplace=True) return result def", "sc from ...utils import numeric as nm Native = sc.AnySeries", "@staticmethod def get_distance_func(): return nm.diff def get_errors(self): yield from super().get_errors()", "check=False): if check: return self.has_valid_items() else: return DEFAULT_NUMERIC def get_sum(self):", "None and y is not None else default, extend, series,", "max(window) for center in range(n_min, n_max): sliding_window = [center +", "window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None): spikes = 
self.mark_spikes(threshold, local_min=local_min, local_max=local_max) if", "self.value_series().items_no(sliding_window, extend=extend, default=default) else: yield self.value_series().get_items_no(sliding_window, extend=extend, default=default) def apply_window_func(", "__init__( self, values=[], validate=False, name=None, ): super().__init__( values=values, validate=validate, name=name,", "for kwargs in list_kwargs: series = series.smooth(**kwargs) return series def", "if x is not None and y is not None", "Callable try: # Assume we're a sub-module in a package.", ") -> Optional[Native]: values = map(function, self.get_sliding_window(window, extend=extend, default=default, as_series=as_series))", "return self.mark_local_extremums(local_min=True, local_max=False) def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False): smoothed_series = self.smooth(window=window)", "series, ) def derivative(self, extend=False, default=0): if extend: return self.preface(None).subtract(", "nm.is_local_extremum(*a, local_min=local_min, local_max=local_max), window=WINDOW_DEFAULT, extend=True, default=False, ) def mark_local_max(self): return", "local_max=True): deviation = self.deviation_from_neighbors(window=window, rel=True) if local_min or local_max: deviation", "def divide(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x, y:", "series, ) def subtract(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda", "= self.__getattribute__(method_name) return smooth_method(*args, **kwargs) def smooth_multiple(self, list_kwargs=[]): series =", "default=None, as_series=True, ) def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None):", "0) class NumericSeries(sc.AnySeries): def __init__( self, values=[], validate=False, name=None, ):", "def smooth_simple_linear(self, window_len=3, exclude_center=False): center = int((window_len - 1) /", 
"utils import numeric as nm except ImportError: # Apparently no", "v, t, s: s if t else v, spikes, self.smooth(window=window),", "window=WINDOW_DEFAULT): return self.apply_window_func( lambda s: s.get_mean(), window=window, extend=True, default=None, as_series=True,", "series = self for kwargs in list_kwargs: series = series.smooth(**kwargs)", "def get_sum(self): return sum( self.filter_values_defined().get_values(), ) def get_mean(self): values_defined =", "def norm(self, rate=None, default=None): if rate is None: rate =", "default, extend, series, ) def derivative(self, extend=False, default=0): if extend:", "else default, extend, series, ) def subtract(self, series, default=None, extend=False):", "def get_distance_func(): return nm.diff def get_errors(self): yield from super().get_errors() if", "default=0): if extend: return self.preface(None).subtract( self, extend=True, default=default, ).crop(0, 1)", "self.slice(n - center, n + center + 1) if exclude_center:", "deviation = deviation.divide(smoothed_series, default=0) return deviation # @deprecated def smooth_simple_linear(self,", "= 0 n_max = self.get_count() else: n_min = - min(window)", "Optional, Callable try: # Assume we're a sub-module in a", "for center in range(n_min, n_max): sliding_window = [center + n", "bool = False, ) -> Optional[Native]: values = map(function, self.get_sliding_window(window,", "inplace=True) else: sub_series = self.slice(n - center, n + center", "threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None): spikes = self.mark_spikes(threshold, local_min=local_min, local_max=local_max)", "= self.deviation_from_neighbors(window=window, rel=True) if local_min or local_max: deviation = deviation.map_zip_values(", "if t else v, spikes, self.smooth(window=window), ) def mark_spikes(self, threshold,", "count - center if is_edge: result.append(self.get_item_no(n), inplace=True) else: sub_series =", "exclude_center=False): center = int((window_len - 1) / 2) count 
=", "window=WINDOW_DEFAULT, extend=True, default=None, as_series=False, inplace: bool = False, ) ->", "1) else: return self.slice(0, -1).subtract( self.shift(-1) ) def get_sliding_window(self, window=WINDOW_DEFAULT,", "2) count = self.get_count() result = self.new() for n in", "is not None else default, extend, series, ) def derivative(self,", "from series import series_classes as sc from utils import numeric", "as_series=False, inplace: bool = False, ) -> Optional[Native]: values =", "from .. import series_classes as sc from ...utils import numeric", "deviation = deviation.map_zip_values( lambda x, m: x if m else", "1) WINDOW_WO_CENTER = (-2, -1, 0, 1, 2) WINDOW_NEIGHBORS =", "x, y: x - y if x is not None", ".. import series_classes as sc from ...utils import numeric as", "sliding_window = [center + n for n in window] if", "self.get_count() - max(window) for center in range(n_min, n_max): sliding_window =", "= - min(window) n_max = self.get_count() - max(window) for center", "self, extend=True, default=default, ).crop(0, 1) else: return self.slice(0, -1).subtract( self.shift(-1)", "local_min=local_min, local_max=local_max) if whitelist: spikes = spikes.map_zip_values( lambda a, b:", "self.mark_local_extremums(local_min=False, local_max=True) def mark_local_min(self): return self.mark_local_extremums(local_min=True, local_max=False) def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS,", "n + center + 1) if exclude_center: sub_series = sub_series.drop_item_no(center)", "min(window) n_max = self.get_count() - max(window) for center in range(n_min,", "self.__getattribute__(method_name) return smooth_method(*args, **kwargs) def smooth_multiple(self, list_kwargs=[]): series = self", "else None, self.mark_local_extremums(local_min=local_min, local_max=local_max), ) spikes = deviation.map_values( lambda x:", "NumericSeries(sc.AnySeries): def __init__( self, values=[], validate=False, name=None, ): super().__init__( values=values,", "x, m: x if m else None, 
self.mark_local_extremums(local_min=local_min, local_max=local_max), )", "else default, extend, series, ) def derivative(self, extend=False, default=0): if", "sum(values_defined) / len(values_defined) def norm(self, rate=None, default=None): if rate is", "= self for kwargs in list_kwargs: series = series.smooth(**kwargs) return", "else v, spikes, self.smooth(window=window), ) def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False,", "-1, 0, 1, 2) WINDOW_NEIGHBORS = (-1, 0) class NumericSeries(sc.AnySeries):", "result.append(self.get_item_no(n), inplace=True) else: sub_series = self.slice(n - center, n +", "**kwargs) def smooth_multiple(self, list_kwargs=[]): series = self for kwargs in", "series def smooth_linear(self, window=WINDOW_DEFAULT): return self.apply_window_func( lambda s: s.get_mean(), window=window,", "self.deviation_from_neighbors(window=window, rel=True) if local_min or local_max: deviation = deviation.map_zip_values( lambda", "values_defined = self.filter_values_defined().get_values() if values_defined: return sum(values_defined) / len(values_defined) def", "= deviation.map_values( lambda x: abs(x or 0) > threshold, )", "not self.has_valid_items(): yield 'Values of {} must be numeric'.format(self.get_class_name()) def", "local_min=False, local_max=True): deviation = self.deviation_from_neighbors(window=window, rel=True) if local_min or local_max:", ").crop(0, 1) else: return self.slice(0, -1).subtract( self.shift(-1) ) def get_sliding_window(self,", "threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True): deviation = self.deviation_from_neighbors(window=window, rel=True) if local_min", "whitelist=None): spikes = self.mark_spikes(threshold, local_min=local_min, local_max=local_max) if whitelist: spikes =", "of {} must be numeric'.format(self.get_class_name()) def has_valid_items(self): for v in", "Callable, window=WINDOW_DEFAULT, extend=True, default=None, as_series=False, inplace: bool = False, )", "for n in 
self.get_range_numbers(): is_edge = n < center or", "return nm.diff def get_errors(self): yield from super().get_errors() if not self.has_valid_items():", "default=None, as_series=True): if extend: n_min = 0 n_max = self.get_count()", "n_max = self.get_count() - max(window) for center in range(n_min, n_max):", "a package. from series import series_classes as sc from utils", "rel=False): smoothed_series = self.smooth(window=window) deviation = self.subtract(smoothed_series) if rel: deviation", "local_max=True, whitelist=None): spikes = self.mark_spikes(threshold, local_min=local_min, local_max=local_max) if whitelist: spikes", "{} must be numeric'.format(self.get_class_name()) def has_valid_items(self): for v in self.get_values():", "def get_mean(self): values_defined = self.filter_values_defined().get_values() if values_defined: return sum(values_defined) /", "we're a sub-module in a package. from series import series_classes", "count = self.get_count() result = self.new() for n in self.get_range_numbers():", "def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None): spikes = self.mark_spikes(threshold,", "else: n_min = - min(window) n_max = self.get_count() - max(window)", "= False, ) -> Optional[Native]: values = map(function, self.get_sliding_window(window, extend=extend,", "extend=extend, default=default) else: yield self.value_series().get_items_no(sliding_window, extend=extend, default=default) def apply_window_func( self,", "local_min=True, local_max=True): return self.apply_window_func( lambda a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max), window=WINDOW_DEFAULT,", "/ 2) count = self.get_count() result = self.new() for n", "range(n_min, n_max): sliding_window = [center + n for n in", "as sc from utils import numeric as nm except ImportError:", "= [center + n for n in window] if as_series:", ") def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, 
local_max=True, whitelist=None): spikes =", "spikes = spikes.map_zip_values( lambda a, b: a and not b,", "v: v / rate if rate else default) def divide(self,", "float)): return False return True def is_numeric(self, check=False): if check:", "'Values of {} must be numeric'.format(self.get_class_name()) def has_valid_items(self): for v", "fall back to a local import. from .. import series_classes", "mark_local_max(self): return self.mark_local_extremums(local_min=False, local_max=True) def mark_local_min(self): return self.mark_local_extremums(local_min=True, local_max=False) def", "how='linear', *args, **kwargs): method_name = 'smooth_{}'.format(how) smooth_method = self.__getattribute__(method_name) return", "s: s.get_mean(), window=window, extend=True, default=None, as_series=True, ) def smooth_spikes(self, threshold,", ") def get_mean(self): values_defined = self.filter_values_defined().get_values() if values_defined: return sum(values_defined)", "self, values=[], validate=False, name=None, ): super().__init__( values=values, validate=validate, name=name, )", "values=values, validate=validate, name=name, ) @staticmethod def get_distance_func(): return nm.diff def", "has been imported, fall back to a local import. 
from", "series_classes as sc from ...utils import numeric as nm Native", "def subtract(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values( lambda x, y:", "spikes = deviation.map_values( lambda x: abs(x or 0) > threshold,", "return self.preface(None).subtract( self, extend=True, default=default, ).crop(0, 1) else: return self.slice(0,", "result def smooth(self, how='linear', *args, **kwargs): method_name = 'smooth_{}'.format(how) smooth_method", "- center if is_edge: result.append(self.get_item_no(n), inplace=True) else: sub_series = self.slice(n", "local_min=False, local_max=True, whitelist=None): spikes = self.mark_spikes(threshold, local_min=local_min, local_max=local_max) if whitelist:", "series_classes as sc from utils import numeric as nm except", "def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True): if extend: n_min =", "self for kwargs in list_kwargs: series = series.smooth(**kwargs) return series", "n in self.get_range_numbers(): is_edge = n < center or n", "return deviation # @deprecated def smooth_simple_linear(self, window_len=3, exclude_center=False): center =", "self.map_values(lambda v: v / rate if rate else default) def", "yield 'Values of {} must be numeric'.format(self.get_class_name()) def has_valid_items(self): for", "center or n >= count - center if is_edge: result.append(self.get_item_no(n),", "y: x - y if x is not None and", "smooth_method(*args, **kwargs) def smooth_multiple(self, list_kwargs=[]): series = self for kwargs", "lambda x, m: x if m else None, self.mark_local_extremums(local_min=local_min, local_max=local_max),", "spikes, self.smooth(window=window), ) def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True): deviation", "self.smooth(window=window), ) def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True): deviation =", "= map(function, self.get_sliding_window(window, extend=extend, 
default=default, as_series=as_series)) return self.set_values(values, inplace=inplace) def", "0, 1) WINDOW_WO_CENTER = (-2, -1, 0, 1, 2) WINDOW_NEIGHBORS", "def derivative(self, extend=False, default=0): if extend: return self.preface(None).subtract( self, extend=True,", "abs(x or 0) > threshold, ) return spikes def plot(self,", "check: return self.has_valid_items() else: return DEFAULT_NUMERIC def get_sum(self): return sum(", "1, 2) WINDOW_NEIGHBORS = (-1, 0) class NumericSeries(sc.AnySeries): def __init__(", "apply_window_func( self, function: Callable, window=WINDOW_DEFAULT, extend=True, default=None, as_series=False, inplace: bool", "window] if as_series: yield self.value_series().items_no(sliding_window, extend=extend, default=default) else: yield self.value_series().get_items_no(sliding_window,", "False return True def is_numeric(self, check=False): if check: return self.has_valid_items()", "result = self.new() for n in self.get_range_numbers(): is_edge = n", "s if t else v, spikes, self.smooth(window=window), ) def mark_spikes(self,", "> threshold, ) return spikes def plot(self, fmt='-'): nm.plot(self.get_range_numbers(), self.get_values(),", "rate else default) def divide(self, series, default=None, extend=False): return self.map_optionally_extend_zip_values(", "package has been imported, fall back to a local import.", "+ n for n in window] if as_series: yield self.value_series().items_no(sliding_window,", "if y else default, extend, series, ) def subtract(self, series," ]
[ "= {} for link_path in temp: json_file = link_path +", "message): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict = await", "= \"/\".join([self.video_dir, str(year), cid, vid, \"info.json\"]) _dict = await self.read_json(json_file)", "if self.write_json(json_file, _dict): return True return False async def encode_error(self,", "= \"/\".join([self.video_dir, str(year), cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") result =", "result = [] for json_file in json_files_path: temp = await", "return False # jsonの更新 _dict[\"title\"] = title _dict[\"explanation\"] = explanation", "= \"/\".join([folderpath, \"info.json\"]) _dict = await self.read_json(json_file) if not _dict:", "_dict): return True return False async def get_all_info(self): json_files_path =", "= await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\", recursive=True) result = [] for json_file", "# 既存のjsonを読み込み json_file = \"/\".join([self.video_dir, str(year), cid, vid, \"info.json\"]) _dict", "cid, vid, \"info.json\"]) _dict = await self.read_json(json_file) if not _dict:", "_video_dir = \"/\".join([self.video_dir, str(year), cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") result", "async def encode_error(self, folderpath, message): # 既存のjsonを読み込み json_file = \"/\".join([folderpath,", "[] for info in video_info: if len(info[\"encode_tasks\"]) > 0: result.append(info)", ".filemanager import filemanager_class class database_class(filemanager_class): def __init__(self): filemanager_class.__init__(self) async def", "_dict: return False if result: # 画質の追加 _dict[\"resolution\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") else:", "str(year), cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") result = {} for", "self.read_json(json_file) if not _dict: return False if f\"{resolution}p\" in _dict[\"resolution\"]:", "= await self.read_json(json_file) if not _dict: pass else: 
result[link_path.split(\"/\")[-1]] =", "return True return False async def get_all_info(self): json_files_path = await", "self.write_json(json_file, _dict): return True return False async def encode_error(self, folderpath,", "json_files_path: temp = await self.read_json(json_file) directory = \"/\".join(json_file.split(\"/\")[:-1]) temp[\"video_directory\"] =", "\"info.json\"]) _dict = await self.read_json(json_file) if not _dict: return False", "temp[\"video_file_name\"] = None result.append(temp) return result async def get_encode_tasks(self): video_info", "await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\", recursive=True) result = [] for json_file in", "self.async_wrap(glob.glob)(f\"{_video_dir}/*\") return [video_id.split(\"/\")[-1] for video_id in temp] async def list_link(self,", "directory = \"/\".join(json_file.split(\"/\")[:-1]) temp[\"video_directory\"] = directory try: temp[\"video_file_name\"] = glob.glob(", "try: temp[\"video_file_name\"] = glob.glob( f\"{directory}/1.*\")[0].split(\"/\")[-1] except IndexError: temp[\"video_file_name\"] = None", "return False # 画質の追加 _dict[\"encode_error\"].append(f\"{message}\") # jsonの書き込み if self.write_json(json_file, _dict):", "vid, title, explanation): # 既存のjsonを読み込み json_file = \"/\".join([self.video_dir, str(year), cid,", "database_class(filemanager_class): def __init__(self): filemanager_class.__init__(self) async def update_info(self, year, cid, vid,", "= explanation # jsonの書き込み if self.write_json(json_file, _dict): return True return", "= [] for info in video_info: if len(info[\"encode_tasks\"]) > 0:", "info in video_info: if len(info[\"encode_tasks\"]) > 0: result.append(info) return result", "f\"{directory}/1.*\")[0].split(\"/\")[-1] except IndexError: temp[\"video_file_name\"] = None result.append(temp) return result async", "self.write_json(json_file, _dict): return True return False async def encode_result(self, folderpath,", "False # 画質の追加 
_dict[\"encode_error\"].append(f\"{message}\") # jsonの書き込み if self.write_json(json_file, _dict): return", "_dict[\"title\"] = title _dict[\"explanation\"] = explanation # jsonの書き込み if self.write_json(json_file,", "json_file in json_files_path: temp = await self.read_json(json_file) directory = \"/\".join(json_file.split(\"/\")[:-1])", "プレイリストに書き込み playlist = \"/\".join([folderpath, \"playlist.m3u8\"]) await self.write_playlist(playlist, resolution) async def", "IndexError: temp[\"video_file_name\"] = None result.append(temp) return result async def get_encode_tasks(self):", "= await self.get_all_info() result = [] for info in video_info:", "if len(info[\"encode_tasks\"]) > 0: result.append(info) return result async def list_video_id(self,", "= \"/\".join([self.video_dir, str(year), cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") return [video_id.split(\"/\")[-1]", "{} for link_path in temp: json_file = link_path + \"/info.json\"", "_dict = await self.read_json(json_file) if not _dict: pass else: result[link_path.split(\"/\")[-1]]", "in temp] async def list_link(self, year, cid): _video_dir = \"/\".join([self.video_dir,", "\"/\".join([folderpath, \"info.json\"]) _dict = await self.read_json(json_file) if not _dict: return", "not _dict: return False if result: # 画質の追加 _dict[\"resolution\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\")", "temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") return [video_id.split(\"/\")[-1] for video_id in temp]", "year, cid, vid, title, explanation): # 既存のjsonを読み込み json_file = \"/\".join([self.video_dir,", "= [] for json_file in json_files_path: temp = await self.read_json(json_file)", "temp] async def list_link(self, year, cid): _video_dir = \"/\".join([self.video_dir, str(year),", "resolution) async def encode_task(self, folderpath, resolution): # 既存のjsonを読み込み json_file =", "jsonの書き込み self.write_json(json_file, _dict) # プレイリストに書き込み playlist = \"/\".join([folderpath, 
\"playlist.m3u8\"]) await", "# 画質の追加 _dict[\"encode_error\"].append(f\"{message}\") # jsonの書き込み if self.write_json(json_file, _dict): return True", "json_file = link_path + \"/info.json\" _dict = await self.read_json(json_file) if", "link_path + \"/info.json\" _dict = await self.read_json(json_file) if not _dict:", "get_all_info(self): json_files_path = await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\", recursive=True) result = []", "\"/\".join([folderpath, \"playlist.m3u8\"]) await self.write_playlist(playlist, resolution) async def encode_task(self, folderpath, resolution):", "link_path in temp: json_file = link_path + \"/info.json\" _dict =", "encode_result(self, folderpath, resolution, result=True): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"])", "= await self.read_json(json_file) if not _dict: return False if f\"{resolution}p\"", "_dict[\"resolution\"]: return True # 画質の追加 _dict[\"encode_tasks\"].append(f\"{resolution}p\") # jsonの書き込み if self.write_json(json_file,", "return [video_id.split(\"/\")[-1] for video_id in temp] async def list_link(self, year,", "if f\"{resolution}p\" in _dict[\"resolution\"]: return True # 画質の追加 _dict[\"encode_tasks\"].append(f\"{resolution}p\") #", "json_file = \"/\".join([folderpath, \"info.json\"]) _dict = await self.read_json(json_file) if not", "explanation # jsonの書き込み if self.write_json(json_file, _dict): return True return False", "cid, vid, title, explanation): # 既存のjsonを読み込み json_file = \"/\".join([self.video_dir, str(year),", "await self.read_json(json_file) if not _dict: return False # jsonの更新 _dict[\"title\"]", "temp[\"video_file_name\"] = glob.glob( f\"{directory}/1.*\")[0].split(\"/\")[-1] except IndexError: temp[\"video_file_name\"] = None result.append(temp)", "pathlib from .filemanager import filemanager_class class database_class(filemanager_class): def __init__(self): filemanager_class.__init__(self)", "True return False async def encode_result(self, folderpath, 
resolution, result=True): #", "False async def encode_result(self, folderpath, resolution, result=True): # 既存のjsonを読み込み json_file", "list_link(self, year, cid): _video_dir = \"/\".join([self.video_dir, str(year), cid]) temp =", "_dict) # プレイリストに書き込み playlist = \"/\".join([folderpath, \"playlist.m3u8\"]) await self.write_playlist(playlist, resolution)", "= await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") return [video_id.split(\"/\")[-1] for video_id in temp] async", "temp: json_file = link_path + \"/info.json\" _dict = await self.read_json(json_file)", "True return False async def get_all_info(self): json_files_path = await self.async_wrap(glob.glob)(", "既存のjsonを読み込み json_file = \"/\".join([self.video_dir, str(year), cid, vid, \"info.json\"]) _dict =", "async def encode_task(self, folderpath, resolution): # 既存のjsonを読み込み json_file = \"/\".join([folderpath,", "False # jsonの更新 _dict[\"title\"] = title _dict[\"explanation\"] = explanation #", "_dict): return True return False async def encode_result(self, folderpath, resolution,", "return False if f\"{resolution}p\" in _dict[\"resolution\"]: return True # 画質の追加", "pass else: result[link_path.split(\"/\")[-1]] = _dict return result async def get_all_info(self):", "_dict return result async def get_all_info(self): json_files_path = await self.async_wrap(glob.glob)(", "def update_info(self, year, cid, vid, title, explanation): # 既存のjsonを読み込み json_file", "# プレイリストに書き込み playlist = \"/\".join([folderpath, \"playlist.m3u8\"]) await self.write_playlist(playlist, resolution) async", "else: _dict[\"encode_error\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") # jsonの書き込み self.write_json(json_file, _dict) # プレイリストに書き込み playlist", "str(year), cid, vid, \"info.json\"]) _dict = await self.read_json(json_file) if not", "# 画質の追加 _dict[\"encode_tasks\"].append(f\"{resolution}p\") # jsonの書き込み if self.write_json(json_file, _dict): return True", "True return False async def encode_error(self, 
folderpath, message): # 既存のjsonを読み込み", "def __init__(self): filemanager_class.__init__(self) async def update_info(self, year, cid, vid, title,", "filemanager_class.__init__(self) async def update_info(self, year, cid, vid, title, explanation): #", "else: result[link_path.split(\"/\")[-1]] = _dict return result async def get_all_info(self): json_files_path", "in video_info: if len(info[\"encode_tasks\"]) > 0: result.append(info) return result async", "= _dict return result async def get_all_info(self): json_files_path = await", "not _dict: return False # jsonの更新 _dict[\"title\"] = title _dict[\"explanation\"]", "json_file = \"/\".join([self.video_dir, str(year), cid, vid, \"info.json\"]) _dict = await", "self.write_json(json_file, _dict): return True return False async def get_all_info(self): json_files_path", "= \"/\".join(json_file.split(\"/\")[:-1]) temp[\"video_directory\"] = directory try: temp[\"video_file_name\"] = glob.glob( f\"{directory}/1.*\")[0].split(\"/\")[-1]", "in json_files_path: temp = await self.read_json(json_file) directory = \"/\".join(json_file.split(\"/\")[:-1]) temp[\"video_directory\"]", "result.append(info) return result async def list_video_id(self, year, cid): _video_dir =", "[video_id.split(\"/\")[-1] for video_id in temp] async def list_link(self, year, cid):", "not _dict: pass else: result[link_path.split(\"/\")[-1]] = _dict return result async", "cid): _video_dir = \"/\".join([self.video_dir, str(year), cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\")", "if self.write_json(json_file, _dict): return True return False async def encode_result(self,", "def encode_error(self, folderpath, message): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"])", "folderpath, message): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict =", "_dict = await self.read_json(json_file) if not _dict: return False if", "vid, \"info.json\"]) _dict = await self.read_json(json_file) if not _dict: return", 
"async def get_encode_tasks(self): video_info = await self.get_all_info() result = []", "await self.read_json(json_file) if not _dict: return False if result: #", "result[link_path.split(\"/\")[-1]] = _dict return result async def get_all_info(self): json_files_path =", "await self.read_json(json_file) directory = \"/\".join(json_file.split(\"/\")[:-1]) temp[\"video_directory\"] = directory try: temp[\"video_file_name\"]", "= directory try: temp[\"video_file_name\"] = glob.glob( f\"{directory}/1.*\")[0].split(\"/\")[-1] except IndexError: temp[\"video_file_name\"]", "video_id in temp] async def list_link(self, year, cid): _video_dir =", "if result: # 画質の追加 _dict[\"resolution\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") else: _dict[\"encode_error\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") #", "cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") return [video_id.split(\"/\")[-1] for video_id in", "\"/\".join([self.video_dir, str(year), cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") return [video_id.split(\"/\")[-1] for", "\"playlist.m3u8\"]) await self.write_playlist(playlist, resolution) async def encode_task(self, folderpath, resolution): #", "画質の追加 _dict[\"encode_tasks\"].append(f\"{resolution}p\") # jsonの書き込み if self.write_json(json_file, _dict): return True return", "\"/\".join(json_file.split(\"/\")[:-1]) temp[\"video_directory\"] = directory try: temp[\"video_file_name\"] = glob.glob( f\"{directory}/1.*\")[0].split(\"/\")[-1] except", "result async def get_all_info(self): json_files_path = await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\", recursive=True)", "def encode_result(self, folderpath, resolution, result=True): # 既存のjsonを読み込み json_file = \"/\".join([folderpath,", "self.write_playlist(playlist, resolution) async def encode_task(self, folderpath, resolution): # 既存のjsonを読み込み json_file", "_dict: return False if 
f\"{resolution}p\" in _dict[\"resolution\"]: return True #", "return False async def get_all_info(self): json_files_path = await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\",", "recursive=True) result = [] for json_file in json_files_path: temp =", "await self.get_all_info() result = [] for info in video_info: if", "# 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict = await self.read_json(json_file)", "in temp: json_file = link_path + \"/info.json\" _dict = await", "0: result.append(info) return result async def list_video_id(self, year, cid): _video_dir", "if not _dict: pass else: result[link_path.split(\"/\")[-1]] = _dict return result", "self.read_json(json_file) if not _dict: return False # 画質の追加 _dict[\"encode_error\"].append(f\"{message}\") #", "await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") return [video_id.split(\"/\")[-1] for video_id in temp] async def", "await self.write_playlist(playlist, resolution) async def encode_task(self, folderpath, resolution): # 既存のjsonを読み込み", "if not _dict: return False if f\"{resolution}p\" in _dict[\"resolution\"]: return", "> 0: result.append(info) return result async def list_video_id(self, year, cid):", "for video_id in temp] async def list_link(self, year, cid): _video_dir", "# jsonの書き込み if self.write_json(json_file, _dict): return True return False async", "jsonの書き込み if self.write_json(json_file, _dict): return True return False async def", "result: # 画質の追加 _dict[\"resolution\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") else: _dict[\"encode_error\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") # jsonの書き込み", "await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") result = {} for link_path in temp: json_file", "[] for json_file in json_files_path: temp = await self.read_json(json_file) directory", "get_encode_tasks(self): video_info = await self.get_all_info() result = [] for info", "for json_file in 
json_files_path: temp = await self.read_json(json_file) directory =", "# jsonの書き込み self.write_json(json_file, _dict) # プレイリストに書き込み playlist = \"/\".join([folderpath, \"playlist.m3u8\"])", "= await self.read_json(json_file) if not _dict: return False # 画質の追加", "_dict[\"explanation\"] = explanation # jsonの書き込み if self.write_json(json_file, _dict): return True", "= title _dict[\"explanation\"] = explanation # jsonの書き込み if self.write_json(json_file, _dict):", "画質の追加 _dict[\"resolution\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") else: _dict[\"encode_error\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") # jsonの書き込み self.write_json(json_file, _dict)", "= await self.read_json(json_file) if not _dict: return False if result:", "_dict[\"resolution\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") else: _dict[\"encode_error\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") # jsonの書き込み self.write_json(json_file, _dict) #", "f\"{resolution}p\" in _dict[\"resolution\"]: return True # 画質の追加 _dict[\"encode_tasks\"].append(f\"{resolution}p\") # jsonの書き込み", "画質の追加 _dict[\"encode_error\"].append(f\"{message}\") # jsonの書き込み if self.write_json(json_file, _dict): return True return", "_dict[\"encode_error\"].append(f\"{message}\") # jsonの書き込み if self.write_json(json_file, _dict): return True return False", "temp[\"video_directory\"] = directory try: temp[\"video_file_name\"] = glob.glob( f\"{directory}/1.*\")[0].split(\"/\")[-1] except IndexError:", "_dict): return True return False async def encode_error(self, folderpath, message):", "not _dict: return False # 画質の追加 _dict[\"encode_error\"].append(f\"{message}\") # jsonの書き込み if", "glob.glob( f\"{directory}/1.*\")[0].split(\"/\")[-1] except IndexError: temp[\"video_file_name\"] = None result.append(temp) return result", "_dict: return False # 画質の追加 _dict[\"encode_error\"].append(f\"{message}\") # jsonの書き込み 
if self.write_json(json_file,", "for info in video_info: if len(info[\"encode_tasks\"]) > 0: result.append(info) return", "in video_info: if len(info[\"encode_tasks\"]) > 0: result.append(info) return result database", "def get_encode_tasks(self): video_info = await self.get_all_info() result = [] for", "= await self.read_json(json_file) if not _dict: return False # jsonの更新", "if not _dict: return False if result: # 画質の追加 _dict[\"resolution\"].append(f\"{resolution}p\")", "for link_path in temp: json_file = link_path + \"/info.json\" _dict", "_dict[\"encode_tasks\"].remove(f\"{resolution}p\") # jsonの書き込み self.write_json(json_file, _dict) # プレイリストに書き込み playlist = \"/\".join([folderpath,", "result = {} for link_path in temp: json_file = link_path", "jsonの更新 _dict[\"title\"] = title _dict[\"explanation\"] = explanation # jsonの書き込み if", "encode_error(self, folderpath, message): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict", "await self.read_json(json_file) if not _dict: pass else: result[link_path.split(\"/\")[-1]] = _dict", "= await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") result = {} for link_path in temp:", "= await self.read_json(json_file) directory = \"/\".join(json_file.split(\"/\")[:-1]) temp[\"video_directory\"] = directory try:", "_dict: pass else: result[link_path.split(\"/\")[-1]] = _dict return result async def", "__init__(self): filemanager_class.__init__(self) async def update_info(self, year, cid, vid, title, explanation):", "if not _dict: return False # 画質の追加 _dict[\"encode_error\"].append(f\"{message}\") # jsonの書き込み", "False async def encode_error(self, folderpath, message): # 既存のjsonを読み込み json_file =", "\"/\".join([self.video_dir, str(year), cid, vid, \"info.json\"]) _dict = await self.read_json(json_file) if", "import glob import pathlib from .filemanager import filemanager_class class database_class(filemanager_class):", "temp = await self.read_json(json_file) directory = \"/\".join(json_file.split(\"/\")[:-1]) 
temp[\"video_directory\"] = directory", "async def list_link(self, year, cid): _video_dir = \"/\".join([self.video_dir, str(year), cid])", "f\"./{self.video_dir}/**/info.json\", recursive=True) result = [] for json_file in json_files_path: temp", "class database_class(filemanager_class): def __init__(self): filemanager_class.__init__(self) async def update_info(self, year, cid,", "async def list_video_id(self, year, cid): _video_dir = \"/\".join([self.video_dir, str(year), cid])", "cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") result = {} for link_path", "title, explanation): # 既存のjsonを読み込み json_file = \"/\".join([self.video_dir, str(year), cid, vid,", "return False async def encode_error(self, folderpath, message): # 既存のjsonを読み込み json_file", "folderpath, resolution): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict =", "def list_link(self, year, cid): _video_dir = \"/\".join([self.video_dir, str(year), cid]) temp", "folderpath, resolution, result=True): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict", "list_video_id(self, year, cid): _video_dir = \"/\".join([self.video_dir, str(year), cid]) temp =", "result=True): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict = await", "self.read_json(json_file) directory = \"/\".join(json_file.split(\"/\")[:-1]) temp[\"video_directory\"] = directory try: temp[\"video_file_name\"] =", "result = [] for info in video_info: if len(info[\"encode_tasks\"]) >", "if not _dict: return False # jsonの更新 _dict[\"title\"] = title", "_dict = await self.read_json(json_file) if not _dict: return False #", "from .filemanager import filemanager_class class database_class(filemanager_class): def __init__(self): filemanager_class.__init__(self) async", "_video_dir = \"/\".join([self.video_dir, str(year), cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") return", "= \"/\".join([folderpath, \"playlist.m3u8\"]) await 
self.write_playlist(playlist, resolution) async def encode_task(self, folderpath,", "return result async def get_all_info(self): json_files_path = await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\",", "self.get_all_info() result = [] for info in video_info: if len(info[\"encode_tasks\"])", "= link_path + \"/info.json\" _dict = await self.read_json(json_file) if not", "False if result: # 画質の追加 _dict[\"resolution\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") else: _dict[\"encode_error\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\")", "+ \"/info.json\" _dict = await self.read_json(json_file) if not _dict: pass", "filemanager_class class database_class(filemanager_class): def __init__(self): filemanager_class.__init__(self) async def update_info(self, year,", "encode_task(self, folderpath, resolution): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict", "explanation): # 既存のjsonを読み込み json_file = \"/\".join([self.video_dir, str(year), cid, vid, \"info.json\"])", "await self.read_json(json_file) if not _dict: return False # 画質の追加 _dict[\"encode_error\"].append(f\"{message}\")", "title _dict[\"explanation\"] = explanation # jsonの書き込み if self.write_json(json_file, _dict): return", "# 画質の追加 _dict[\"resolution\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") else: _dict[\"encode_error\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") # jsonの書き込み self.write_json(json_file,", "return True return False async def encode_error(self, folderpath, message): #", "return True # 画質の追加 _dict[\"encode_tasks\"].append(f\"{resolution}p\") # jsonの書き込み if self.write_json(json_file, _dict):", "self.read_json(json_file) if not _dict: return False # jsonの更新 _dict[\"title\"] =", "_dict[\"encode_tasks\"].append(f\"{resolution}p\") # jsonの書き込み if self.write_json(json_file, _dict): return True return False", "= None 
result.append(temp) return result async def get_encode_tasks(self): video_info =", "video_info: if len(info[\"encode_tasks\"]) > 0: result.append(info) return result async def", "return result async def list_video_id(self, year, cid): _video_dir = \"/\".join([self.video_dir,", "async def update_info(self, year, cid, vid, title, explanation): # 既存のjsonを読み込み", "if len(info[\"encode_tasks\"]) > 0: result.append(info) return result database = database_class()", "self.read_json(json_file) if not _dict: return False if result: # 画質の追加", "既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict = await self.read_json(json_file) if", "glob import pathlib from .filemanager import filemanager_class class database_class(filemanager_class): def", "return False if result: # 画質の追加 _dict[\"resolution\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") else: _dict[\"encode_error\"].append(f\"{resolution}p\")", "await self.read_json(json_file) if not _dict: return False if f\"{resolution}p\" in", "import pathlib from .filemanager import filemanager_class class database_class(filemanager_class): def __init__(self):", "year, cid): _video_dir = \"/\".join([self.video_dir, str(year), cid]) temp = await", "self.async_wrap(glob.glob)(f\"{_video_dir}/*\") result = {} for link_path in temp: json_file =", "self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\", recursive=True) result = [] for json_file in json_files_path:", "False if f\"{resolution}p\" in _dict[\"resolution\"]: return True # 画質の追加 _dict[\"encode_tasks\"].append(f\"{resolution}p\")", "result async def get_encode_tasks(self): video_info = await self.get_all_info() result =", "result async def list_video_id(self, year, cid): _video_dir = \"/\".join([self.video_dir, str(year),", "def encode_task(self, folderpath, resolution): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"])", "= glob.glob( f\"{directory}/1.*\")[0].split(\"/\")[-1] except IndexError: 
temp[\"video_file_name\"] = None result.append(temp) return", "str(year), cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") return [video_id.split(\"/\")[-1] for video_id", "return False async def encode_result(self, folderpath, resolution, result=True): # 既存のjsonを読み込み", "update_info(self, year, cid, vid, title, explanation): # 既存のjsonを読み込み json_file =", "video_info = await self.get_all_info() result = [] for info in", "\"/\".join([self.video_dir, str(year), cid]) temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") result = {}", "True # 画質の追加 _dict[\"encode_tasks\"].append(f\"{resolution}p\") # jsonの書き込み if self.write_json(json_file, _dict): return", "playlist = \"/\".join([folderpath, \"playlist.m3u8\"]) await self.write_playlist(playlist, resolution) async def encode_task(self,", "\"/info.json\" _dict = await self.read_json(json_file) if not _dict: pass else:", "in _dict[\"resolution\"]: return True # 画質の追加 _dict[\"encode_tasks\"].append(f\"{resolution}p\") # jsonの書き込み if", "directory try: temp[\"video_file_name\"] = glob.glob( f\"{directory}/1.*\")[0].split(\"/\")[-1] except IndexError: temp[\"video_file_name\"] =", "len(info[\"encode_tasks\"]) > 0: result.append(info) return result async def list_video_id(self, year,", "False async def get_all_info(self): json_files_path = await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\", recursive=True)", "json_files_path = await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\", recursive=True) result = [] for", "video_info: if len(info[\"encode_tasks\"]) > 0: result.append(info) return result database =", "self.write_json(json_file, _dict) # プレイリストに書き込み playlist = \"/\".join([folderpath, \"playlist.m3u8\"]) await self.write_playlist(playlist,", "async def encode_result(self, folderpath, resolution, result=True): # 既存のjsonを読み込み json_file =", "_dict[\"encode_error\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") # jsonの書き込み 
self.write_json(json_file, _dict) # プレイリストに書き込み playlist =", "result.append(temp) return result async def get_encode_tasks(self): video_info = await self.get_all_info()", "resolution, result=True): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict =", "temp = await self.async_wrap(glob.glob)(f\"{_video_dir}/*\") result = {} for link_path in", "_dict: return False # jsonの更新 _dict[\"title\"] = title _dict[\"explanation\"] =", "# jsonの更新 _dict[\"title\"] = title _dict[\"explanation\"] = explanation # jsonの書き込み", "not _dict: return False if f\"{resolution}p\" in _dict[\"resolution\"]: return True", "async def get_all_info(self): json_files_path = await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\", recursive=True) result", "return True return False async def encode_result(self, folderpath, resolution, result=True):", "resolution): # 既存のjsonを読み込み json_file = \"/\".join([folderpath, \"info.json\"]) _dict = await", "except IndexError: temp[\"video_file_name\"] = None result.append(temp) return result async def", "_dict[\"encode_tasks\"].remove(f\"{resolution}p\") else: _dict[\"encode_error\"].append(f\"{resolution}p\") _dict[\"encode_tasks\"].remove(f\"{resolution}p\") # jsonの書き込み self.write_json(json_file, _dict) # プレイリストに書き込み", "None result.append(temp) return result async def get_encode_tasks(self): video_info = await", "return result async def get_encode_tasks(self): video_info = await self.get_all_info() result", "self.read_json(json_file) if not _dict: pass else: result[link_path.split(\"/\")[-1]] = _dict return", "def list_video_id(self, year, cid): _video_dir = \"/\".join([self.video_dir, str(year), cid]) temp", "def get_all_info(self): json_files_path = await self.async_wrap(glob.glob)( f\"./{self.video_dir}/**/info.json\", recursive=True) result =", "if self.write_json(json_file, _dict): return True return False async def get_all_info(self):", "import filemanager_class class database_class(filemanager_class): def 
__init__(self): filemanager_class.__init__(self) async def update_info(self," ]
[ "Exception(\"Only full OLC supported. Use olc.recoverNearest().\") self.code = plus_code.upper() if", "self.code_length def getCoordinateIncrement(self): '''get 1th value''' return self.coordinate_increment # Copy", "same tile''' if self.isSameTile(potentialNeighbor): return False neighbors = self.getNeighbors() for", "x in BASE_20_BORDER_SET if x[0] == 'X'} EAST_DIGITS = {x", "SEPARATOR if len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT olcBuilder +=", "PADDING_CHARACTER = '0' PADDING_2 = \"00\" PADDING_4 = \"0000\" PADDING_6", "['2', 'X'] or x[1] in ['2', 'X']} NORTH_DIGITS = {x", "other way around, do so''' firstDiff = self.characterDistance(c1, c2) NUM_CHARACTERS_USED", "tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): ''' tile", "= memoized_digit_dict.get(f\"N{iterations_needed}\") east_set = memoized_digit_dict.get(f\"E{iterations_needed}\", set()) south_set = memoized_digit_dict.get(f\"S{iterations_needed}\", set())", "self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): ''' tile is desired size ''' return", "10 characters long.''' PINPOINT = (10, 0.000125) def __init__(self, code_length,", "too big' raise Exception(\"OLC padding larger than allowed by desired_tile_size\")", "address is a [2/4/6/8/10]-character string that corresponds to a *", "neighborTile is south ''' direction = direction + 'S' if", "for x in BASE_20_SET if x[0] in ['2', 'X'] or", "in existing_bases for base in memoized_digit_dict.get(f\"{eight_point_direction}1\")} memoized_digit_dict[f\"{eight_point_direction}{i + 2}\"] =", "globe, but can be up to approximately 110km. 
Tile addresses", "whole tile, probably padded with '0' characters */''' return self.getWrappedOpenLocationCode()", "will be 4 characters long.''' REGION = (4, 1.0) '''", "constructor arguments\") if lat and long: self.constructTileFromLatLong(lat, long, tile_size) elif", "north will be Xd east dX south 2d west d2'''", "supported. Use recover().\") modified_plus_code = return_code_of_tile_size(plus_code, tile_size) self.code = modified_plus_code.upper()", "poles''' neighbors.add(new_OpenGeoTile) return neighbors def isSameTile(self, potentialSameTile): '''/** * Check", "else: raise Exception(\"Too precise, sort this later\") def constructTileFromCodeAndSize(self, plus_code,", "tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile): '''//don't add tiles that are the", "the same or adjacent * \"tiles\", to determine all neighboring", "*/''' return self.code def returnCode(self): return self.code def getTileSize(self): '''/**", "detectedTileSize = TileSize.REGION olcBuilder += tileAddress + PADDING_4 + SEPARATOR", "i in range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address + base)}", "tile_size) self.code = modified_plus_code.upper() self.tile_size = tile_size def constructTileFromLatLong(self, lat:", "for d in eight_point_direction] directions = [direction for direction in", "the smaller tile, //but not the smaller tile itself, is", "the area potentialMember falls within the area of this tile,", "desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address = self.getTileAddress() if len(address) == TileSize.PINPOINT.getCodeLength(): '''", "the given size that need to * be traversed getting", "a tile describes the same area as this one. *", "== 2: ''' NW, NE, SW, SE... should return only", "globe, but can be up to approximately 14m. 
Tile addresses", "tile''' if self.isSameTile(potentialNeighbor): return False neighbors = self.getNeighbors() for neighbor", "ordered data''' directions_list = [\"NW\", \"N\", \"NE\", \"E\", \"SE\", \"S\",", "in BASE_20_BORDER_SET if x[0] == 'X'} EAST_DIGITS = {x for", "new OpenGeoTile from an existing {@link com.google.openlocationcode.OpenLocationCode}. @param olc OpenLocationCode", "arguments\") if lat and long: self.constructTileFromLatLong(lat, long, tile_size) elif code", "this OpenGeoTile */''' return self.code def returnCode(self): return self.code def", "return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return self.getTileAddressPrefix() def getTileOpenLocationCode(self): #", "i in range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address) for", "West ''' base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed)", "in tiles etc. * * Open Location Code is a", "of the whole tile, see {@link #getTileOpenLocationCode()}. 
* @return the", "by desired_tile_size\") code_address = code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength() code =", "be 4 characters long.''' REGION = (4, 1.0) ''' An", "return math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns neighbor's direction,", "CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET = {x+y for x in CODE_ALPHABET", "tile is right -> neighborTile is east ''' direction =", "if not */''' # //if A contains B, then B's", "sizes and addresses are the same; false if not */'''", "string that corresponds to a * valid {@link com.google.openlocationcode.OpenLocationCode} after", "= [20.0, 1.0, 0.05, 0.0025, 0.000125] delta = self.getTileSize().getCoordinateIncrement() code_area", "where both are the same; false if not */''' #", "for x in BASE_20_BORDER_SET if x[1] == 'X'} SOUTH_DIGITS =", "case of a GLOBAL tile, * returns the empty string.", "then B's address has A's address as a prefix return", "if eight_point_direction is None: ''' all borders ''' ''' traveling", "so this should only be * interpreted as a very", "set() west_exists = west_set != set() for base in north_set:", "if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\") return", "the globe, but can be up to approximately 2200km. 
Tile", "will be 10 characters long.''' PINPOINT = (10, 0.000125) def", "length of this tile varies with its location on the", "area potentialMember falls within the area of this tile, including", "not west_exists: west_set.add(west_base) memoized_digit_dict[f\"E{iterations_needed}\"] = east_set memoized_digit_dict[f\"S{iterations_needed}\"] = south_set memoized_digit_dict[f\"W{iterations_needed}\"]", "i in range(quickest_i, iterations_needed): existing_bases = memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\") next_set", "otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\") return self.getLatitudinalTileDistance(otherTile,", "= (6, 0.05) ''' An area of 0.0025° x 0.0025°.", "== '2'} memoized_digit_dict = { \"N1\": NORTH_DIGITS, \"E1\": EAST_DIGITS, \"S1\":", "* @param potentialSameTile the OpenGeoTile to check * @return true", "if not base_set: quickest_i = 0 for i in reversed(range(iterations_needed)):", "# deltas = [20.0, 1.0, 0.05, 0.0025, 0.000125] delta =", "{x for x in BASE_20_BORDER_SET if x[0] == '2'} WEST_DIGITS", "* For the plus code of the whole tile, see", "not exist in alphabet\") return index def characterDistance(self, c1, c2):", "return index def characterDistance(self, c1, c2): return self.getCharacterIndex(c1) - self.getCharacterIndex(c2)", "east ''' direction = direction + 'E' else: ''' other", "return {address+base for address in set_of_addresses for base in BASE_20_SET}", "OpenGeoTile from an existing * {@link com.google.openlocationcode.OpenLocationCode}. * @param olc", "Creates a new OpenGeoTile from a tile address. * @param", "the typically 8 neighboring tiles of the same size. 
*", "= (10, 0.000125) def __init__(self, code_length, coordinate_increment): self.code_length = code_length", "is a technology developed by Google and licensed under the", "eight_point_direction is None: directions = directions_list elif isinstance(eight_point_direction, str): directions", "= code_area.longitudeCenter '''directions_list included to keep ordered data''' directions_list =", "class allows to determine whether two locations are in the", "CODE_ALPHABET.find(other_tile_y): ''' other tile is right -> neighborTile is east", "can be up to approximately 5.5km. Tile addresses will be", "+1, 0, -1, -1, -1, 0] #long_diff = [-1, 0,", "1] if i == 0: '''//for the first longitudinal value,", "see https://github.com/google/open-location-code * * @author <NAME> * @version 0.1.0 */", "allowed by desired_tile_size\") iterations_needed = int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set =", "otherTile another tile of the same size as this one", "False return potentialSameTile.getTileAddress() == self.getTileAddress() def isNeighbor(self, potentialNeighbor): '''/** *", "tile address is the address of the next biggest tile", "set() for base in north_set: east_base = \"\" south_base =", "numIterations = self.tile_size.getCodeLength()/2 #; //1..5 tileDistance = 0 for i", "''' if not olc.isFull(plus_code): raise Exception(\"Only full OLC supported. Use", "Other than * {@link #getWrappedOpenLocationCode()}, this will return a full", "of type list or str\") if eight_point_direction is None: directions", "aid memorability. SEPARATOR = '+' # Copy from OpenLocationCode.java #", "'+' # Copy from OpenLocationCode.java # The character used to", "parts to aid memorability. 
SEPARATOR = '+' # Copy from", "y in CODE_ALPHABET} BASE_20_BORDER_SET = {x for x in BASE_20_SET", "== 'X'} SOUTH_DIGITS = {x for x in BASE_20_BORDER_SET if", "{OpenGeoTile(address+base) for base in all_border_set} elif len(eight_point_direction) == 1: '''", "return True return False def contains(self, potentialMember): '''/** * Check", "are the same as this one due to clipping near", "is right -> neighborTile is east ''' direction = direction", "smallerTile = potentialNeighbor biggerTile = self else: smallerTile = self", "desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()]) for i in range(int(iterations_needed)):", "= olc.decode(self.code) latitude = code_area.latitudeCenter longitude = code_area.longitudeCenter '''directions_list included", "self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1] direction = \"\"", "def getChebyshevTileDistanceTo(self, otherTile): '''/** * Calculates the Chebyshev (chessboard) distance", "else: firstDiff += NUM_CHARACTERS_USED tileDistance += firstDiff else: tileDistance +=", "The prefix of a tile address is the address of", "* The full {@link com.google.openlocationcode.OpenLocationCode} for this tile. Other than", "= set() east_set = set() south_set = set() west_set =", "False neighbors = smallerTile.getNeighbors() for neighbor in neighbors: if biggerTile.contains(neighbor):", "west_base += relevant_digit + \"2\" if not east_exists: east_set.add(east_base) if", "None olcBuilder = \"\" if len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize =", "a larger TileSize. * @throws IllegalArgumentException if olc is not", "'2'} memoized_digit_dict = { \"N1\": NORTH_DIGITS, \"E1\": EAST_DIGITS, \"S1\": SOUTH_DIGITS,", "elif len(eight_point_direction) == 2: ''' NW, NE, SW, SE... 
should", "getDirection(self, otherTile): '''/** * Returns the approximate direction of the", "*/''' if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\")", "approximation and used as such. * @param otherTile another tile", "'X2', 'NE': 'XX', 'SE': '2X', 'SW': '22' } base =", "if not olc.isFull(plus_code): raise Exception(\"Only full OLC supported. Use recover().\")", "True), self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self, otherTile): '''/** * Returns the", "the exact plus code wrapped by this OpenGeoTile */''' return", "north_set: east_base = \"\" south_base = \"\" west_base = \"\"", "Exception(\"OLC padding larger than allowed by desired_tile_size\") iterations_needed = int(desired_tile_size.getCodeLength()/2", "self.tile_size = TileSize.REGION elif code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT", "5.5km. Tile addresses will be 6 characters long.''' DISTRICT =", "use for this OpenGeoTile * @throws IllegalArgumentException passed through from", "the location * @param tile_size tile size to use for", "wrapping - basically, //if it's shorter to go the other", "memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: quickest_i", "#lat_diff = [+1, +1, +1, 0, -1, -1, -1, 0]", "re.findall('..', base) ''' north will be Xd east dX south", "into two parts to aid memorability. 
SEPARATOR = '+' #", "tile_size=None, lat=None, long=None, ): if not (code or (code and", "longitude + (delta * long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize())", "This can be a padded code, in which * case", "SEPARATOR if len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT olcBuilder +=", "BASE_20_BORDER_SET if x[1] == '2'} memoized_digit_dict = { \"N1\": NORTH_DIGITS,", "code, in which * case the resulting OpenGeoTile will have", "given tile_size ''' if not olc.isFull(plus_code): raise Exception(\"Only full OLC", "True return False else: '''//tiles of different size are adjacent", "than allowed by desired_tile_size\") code_address = code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength()", "2200km. Tile addresses will be 2 characters long.''' GLOBAL =", "'NW': 'X2', 'NE': 'XX', 'SE': '2X', 'SW': '22' } base", "one, to calculate a distance in tiles etc. * *", "tiles of the same size; * may return less than", "westward direction * @throws IllegalArgumentException thrown if otherTile has different", "xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns neighbor's direction, to assist", "self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size = tile_size def constructTileFromTileAddress(self,", "biggest tile at this location. * @return this tile's address", "padding with an appropriate * number of '0' and '+'", "area of 0.000125° x 0.000125°. 
The side length of this", "code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT elif code_length == TileSize.NEIGHBORHOOD.getCodeLength():", "neighbors.add(new_OpenGeoTile) return neighbors def isSameTile(self, potentialSameTile): '''/** * Check if", "CODE_ALPHABET.find(other_tile_y) == 0: ''' other tile is right -> neighborTile", "2, 4, 6, 8, or 10, which corresponds to a", "west_set != set() for base in north_set: east_base = \"\"", "an existing * {@link com.google.openlocationcode.OpenLocationCode}. * @param olc OpenLocationCode for", "index def characterDistance(self, c1, c2): return self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def", "olc OpenLocationCode for the current location. This can be a", "== TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION olcBuilder += tileAddress + PADDING_4", "one to the other tile * @throws IllegalArgumentException thrown if", "rough approximation and used as such. * @param otherTile another", "base in north_set: east_base = \"\" south_base = \"\" west_base", "* invalid length */''' detectedTileSize = None olcBuilder = \"\"", "in base_set} elif len(eight_point_direction) == 2: ''' NW, NE, SW,", "TileSize.DISTRICT olcBuilder += tileAddress + PADDING_2 + SEPARATOR if len(tileAddress)", "on the globe, but can be up to approximately 2200km.", "north_set = set() east_set = set() south_set = set() west_set", "tile address is a string of length 2, 4, 6,", "neighborTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\") self_tile_x =", "== 0: '''//for the first longitudinal value, we need to", "in the order above ''' uppercase_input_directions = [d.upper() for d", "@param tileAddress a tile address is a [2/4/6/8/10]-character string that", "for x in BASE_20_BORDER_SET if x[0] == '2'} WEST_DIGITS =", "neighbor in neighbors: if biggerTile.contains(neighbor): return True return False def", "= olcBuilder.upper() def getWrappedOpenLocationCode(self): # 
this code is effectively redundant", "allows to determine whether two locations are in the same", "0.000125° x 0.000125°. The side length of this tile varies", "plus_code.upper() if is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER) else: code_length = min(len(plus_code)-1,", "for given tile_size ''' if not olc.isFull(plus_code): raise Exception(\"Only full", "@throws IllegalArgumentException when trying to pass a short (non-full) OLC,", "self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code): '''/** * Creates a new OpenGeoTile", "10) if code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL elif code_length", "true if this and potentialNeighbor are adjacent (8-neighborhood); * false", "the order above ''' uppercase_input_directions = [d.upper() for d in", "Apache License 2.0. * For more information, see https://github.com/google/open-location-code *", "for base in base_set} elif len(eight_point_direction) == 2: ''' NW,", "* @param latitude latitude of the location * @param longitude", "memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address + base) for base in base_set} elif", "sizes don't match\") numIterations = self.tile_size.getCodeLength()/2 #1..5 tileDistance = 0", "= '' for i in range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction) return", "of 0.000125° x 0.000125°. The side length of this tile", "technology developed by Google and licensed under the Apache License", "the code into two parts to aid memorability. 
SEPARATOR =", "size to use for this OpenGeoTile @throws IllegalArgumentException when trying", "olcBuilder = \"\" if len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL", "one tile is above the other ''' if CODE_ALPHABET.find(self_tile_y) in", "'''/** * Creates a new OpenGeoTile from an existing *", "= direction + 'E' else: ''' other tile is left", "if potentialNeighbor.isSameTile(neighbor): return True return False else: '''//tiles of different", "Check if a tile describes the same area as this", "isinstance(eight_point_direction, str): eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses = set() if eight_point_direction", "raise Exception(\"Tile sizes don't match\") self_tile_x = self.getTileAddress()[-2] self_tile_y =", "in BASE_20_BORDER_SET if x[0] == '2'} WEST_DIGITS = {x for", "south 2d west d2''' for n_tuple in base_tuple_list: relevant_digit =", "0.000125) def __init__(self, code_length, coordinate_increment): self.code_length = code_length self.coordinate_increment =", "modified_plus_code.upper() self.tile_size = tile_size def constructTileFromLatLong(self, lat: float, long: float,", "larger than allowed by desired_tile_size\") iterations_needed = int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2)", "return None return {address+base for address in set_of_addresses for base", "self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1]", "PADDING_4 = \"0000\" PADDING_6 = \"000000\" CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET", "desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception(\"OLC padding larger than", "as a very rough approximation and used as such. 
*", "int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns", "calculate a distance in tiles etc. * * Open Location", "* Check if a tile is neighboring this one. *", "import Enum import math, re class TileSize(Enum): ''' An area", "@return the tile address of this OpenGeoTile; */''' return self.tile_address", "by Google and licensed under the Apache License 2.0. *", "__init__(self, code=None, tile_size=None, lat=None, long=None, ): if not (code or", "+ relevant_digit if not west_exists: west_base += relevant_digit + \"2\"", "tile. * @return a plus code for the whole tile,", "tile, * returns the empty string. */''' if self.tile_size ==", "return False neighbors = smallerTile.getNeighbors() for neighbor in neighbors: if", "large margin of error, especially for big or far away", "tile is desired size ''' return self elif self.tile_size.getCodeLength() >", "deal with invalid lat/long values directly''' neighborLatitude = latitude +", "first longitudinal value, we need to take care of wrapping", "class OpenGeoTile(): ''' /** * A wrapper around an {@code", "to determine whether two locations are in the same or", "in uppercase_input_directions] neighbors = set() for direction in directions: lat_diff,", "larger than allowed by desired_tile_size\") iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2", "in range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address) for address", "''' direction = direction + 'N' else: direction = direction", "CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): ''' other tile is above -> neighborTile", "!= set() south_exists = south_set != set() west_exists = west_set", "removed. 
In case of a GLOBAL tile, * returns the", "wrapped by this OpenGeoTile */''' return self.code def returnCode(self): return", "raise Exception(\"Only full OLC supported. Use olc.recoverNearest().\") self.code = plus_code.upper()", "yDiff = int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile):", "characters long.''' PINPOINT = (10, 0.000125) def __init__(self, code_length, coordinate_increment):", "\"\" south_base = \"\" west_base = \"\" base_tuple_list = re.findall('..',", "if x[1] == '2'} memoized_digit_dict = { \"N1\": NORTH_DIGITS, \"E1\":", "= memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address + base) for base in base_set}", "memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return", "areas ''' if not self.isNeighbor(neighborTile): raise Exception(\"neighborTile must be neighbor\")", "| south_set | west_set memoized_digit_dict[f\"A{iterations_needed}\"] = all_border_set return {OpenGeoTile(address+base) for", "area identified by a prefix * of the given OpenLocationCode.", "Use recover().\") modified_plus_code = return_code_of_tile_size(plus_code, tile_size) self.code = modified_plus_code.upper() self.tile_size", "Exception(\"Tile sizes don't match\") self_tile_x = self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1]", "the current location. 
This can be a padded code, in", "+ PADDING_6 + SEPARATOR if len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize =", "Exception(\"OLC padding larger than allowed by desired_tile_size\") iterations_needed = desired_tile_size.getCodeLength()/2", "reduce by swaping digits ''' all_border_set = memoized_digit_dict.get(f\"A{iterations_needed}\") if not", "of * invalid length */''' detectedTileSize = None olcBuilder =", "from a tile address. * @param tileAddress a tile address", "TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't", "+ base)} def memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if", "parent tiles ''' if CODE_ALPHABET.find(other_tile_x) == 0: ''' other tile", "* @param olc OpenLocationCode for the current location. This can", "this one. * @param potentialNeighbor the OpenGeoTile to check *", "characters long.''' DISTRICT = (6, 0.05) ''' An area of", "'E' else: ''' other tile is left -> neighborTile is", "min(len(plus_code)-1, 10) if code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL elif", "OpenLocationCode} object, focusing on the area identified by a prefix", "= direction + 'N' else: direction = direction + 'S'", "raise Exception(\"Character does not exist in alphabet\") return index def", "def getTileOpenLocationCode(self): # this code is redundant '''/** * The", "length of this address * @throws IllegalArgumentException passed through from", "to check * @return true if tile sizes and addresses", "Address \"CVXW\" corresponds to OLC \"CVXW0000+\" * @return the tile", "TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size ''' return None", "TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT elif code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size =", "east_exists: east_base += relevant_digit + \"X\" if 
not south_exists: south_base", "south_set | west_set memoized_digit_dict[f\"A{iterations_needed}\"] = all_border_set return {OpenGeoTile(address+base) for base", "x[1] in ['2', 'X']} NORTH_DIGITS = {x for x in", "tile, see {@link #getTileOpenLocationCode()}. * @return the exact plus code", "OpenGeoTile. * @return the {@link TileSize} of this OpenGeoTile */'''", "already minimum possible size ''' return None elif self.tile_size.getCodeLength() >", "{OpenGeoTile(address) for address in address_set} return tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT,", "len(eight_point_direction) == 1: ''' North, South, East, or West '''", "return self.tile_address def getTileAddressPrefix(self): '''/** * The prefix of a", "are adjacent (8-neighborhood); * false if not */''' if potentialNeighbor.getTileSize()", "\"SW\", \"W\"] direction_dict = { \"NW\": [+1, -1], \"N\": [+1,", "a given one, to calculate a distance in tiles etc.", "= int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set = set() east_set = set()", "if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): ''' other tile is above ->", "else: if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): ''' other tile is right", "= memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: quickest_i = 0 for i", "neighboring tiles of the same size; * may return less", "c2) NUM_CHARACTERS_USED = 18 #; //360°/20° = 18 if abs(firstDiff)", "* @return an integer value corresponding to the number of", "== TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT olcBuilder += tileAddress[0:8] + SEPARATOR", "if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]:", "if not type(eight_point_direction) in [type(None), list, str]: raise Exception(\"eight_point_direction must", "* Creates a new OpenGeoTile from an existing * 
{@link", "long=None, ): if not (code or (code and tile_size) or", "need to take care of wrapping - basically, //if it's", "+1, +1, 0, -1, -1] if not type(eight_point_direction) in [type(None),", "falls within the area of this tile, including cases *", "used as such. * @param otherTile another tile of the", "south_set = set() west_set = set() if isinstance(eight_point_direction, str): eight_point_direction", "smaller tile itself, is contained within the bigger tile''' if", "directions = [direction for direction in directions_list if direction in", "East, or West ''' base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set:", "EAST_DIGITS, \"S1\": SOUTH_DIGITS, \"W1\": WEST_DIGITS, } def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER)", "from openlocationcode import openlocationcode as olc from enum import Enum", "of a GLOBAL tile, * returns the empty string. */'''", "type(eight_point_direction) in [type(None), list, str]: raise Exception(\"eight_point_direction must be of", "''' other tile is below -> neighborTile is south '''", "this and potentialNeighbor are adjacent (8-neighborhood); * false if not", "address. * @param tileAddress a tile address is a [2/4/6/8/10]-character", "for same tile''' if self.isSameTile(potentialNeighbor): return False neighbors = self.getNeighbors()", "as this one * @return an integer value corresponding to", "* For more information, see https://github.com/google/open-location-code * * @author <NAME>", "object, focusing on the area identified by a prefix *", "biggerTile.contains(smallerTile): return False neighbors = smallerTile.getNeighbors() for neighbor in neighbors:", "OLC supported. 
Use olc.recoverNearest().\") self.code = plus_code.upper() if is_padded(plus_code): code_length", "tileAddress + PADDING_2 + SEPARATOR if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize", "\"\" west_base = \"\" base_tuple_list = re.findall('..', base) ''' north", "long.''' NEIGHBORHOOD = (8, 0.0025) ''' An area of 0.000125°", "this will return a full plus code for the whole", "{x for x in BASE_20_BORDER_SET if x[1] == '2'} memoized_digit_dict", "self.tile_size = TileSize.PINPOINT else: raise Exception(\"Too precise, sort this later\")", "lat_diff, long_diff = direction_dict.get(direction) ''' //OLC constructor clips and normalizes,", "tileDistance = 0 for i in range(int(numIterations)): tileDistance *= 20", "in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]: ''' ajacent", "desired_tile_size.getCodeLength(): raise Exception(\"OLC padding larger than allowed by desired_tile_size\") code_address", "must be of type list or str\") if eight_point_direction is", "None: directions = directions_list elif isinstance(eight_point_direction, str): directions = []", "@param olc OpenLocationCode for the current location @param tile_size tile", "0.05, 0.0025, 0.000125] delta = self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code) latitude", "1] c2 = otherTile.getTileAddress()[i*2 + 1] if i == 0:", "* of the given OpenLocationCode. * * Using this wrapper", "and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles '''", "of this tile varies with its location on the globe,", "110km. Tile addresses will be 4 characters long.''' REGION =", "of 0.05° x 0.05°. The side length of this tile", "plus code for the whole tile. * @return a plus", "approximately 5.5km. 
Tile addresses will be 6 characters long.''' DISTRICT", "''' tile is desired size ''' return self elif self.tile_size.getCodeLength()", "return False return potentialSameTile.getTileAddress() == self.getTileAddress() def isNeighbor(self, potentialNeighbor): '''/**", "returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address = self.getTileAddress() if len(address) == TileSize.PINPOINT.getCodeLength():", "if memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\"): quickest_i = i break for i", "same; false if not */''' if potentialSameTile.getTileSize() != self.getTileSize(): return", "= [direction for direction in directions_list if direction in uppercase_input_directions]", "two characters removed. In case of a GLOBAL tile, *", "-1], \"S\": [-1, 0], \"SE\": [-1, +1], } #lat_diff =", "-> neighborTile is north ''' direction = direction + 'N'", "for neighbor in neighbors: if biggerTile.contains(neighbor): return True return False", "1.0) ''' An area of 0.05° x 0.05°. The side", "of this tile, including cases * where both are the", "self.coordinate_increment # Copy from OpenLocationCode.java # A separator used to", "def getTileSize(self): '''/** * Get the {@link TileSize} of this", "return only one tile''' ordinal_digit_dict = { 'NW': 'X2', 'NE':", "= set() for direction in directions: lat_diff, long_diff = direction_dict.get(direction)", "TileSize.PINPOINT olcBuilder += tileAddress[0:8] + SEPARATOR + tileAddress[8:10] if detectedTileSize", "sort this later\") def constructTileFromCodeAndSize(self, plus_code, tile_size): ''' Creates a", "address is the address of the next biggest tile at", "License 2.0. 
* For more information, see https://github.com/google/open-location-code * *", "of the typically 8 neighboring tiles of the same size;", "true if the area potentialMember falls within the area of", "set()) west_set = memoized_digit_dict.get(f\"W{iterations_needed}\", set()) east_exists = east_set != set()", "of different size are adjacent if at least one neighbor", "if not tile_size: tile_size = TileSize.PINPOINT self.code = olc.encode(lat, long,", "potentialSameTile the OpenGeoTile to check * @return true if tile", "{@link OpenLocationCode#OpenLocationCode(double, double, int)} */''' if not tile_size: tile_size =", "\"NE\", \"E\", \"SE\", \"S\", \"SW\", \"W\"] direction_dict = { \"NW\":", "*/''' if potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid iterating over neighbors for", "return self.code def returnCode(self): return self.code def getTileSize(self): '''/** *", "'''//following definitions copied from OpenLocationCode.java''' index = \"23456789CFGHJMPQRVWX\".find(c.upper()) if index", "return self elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big'", "south_set = memoized_digit_dict.get(f\"S{iterations_needed}\", set()) west_set = memoized_digit_dict.get(f\"W{iterations_needed}\", set()) east_exists =", "tileDistance *= 20 c1 = self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2] tileDistance", "otherTile): '''/** * Calculates the Chebyshev (chessboard) distance between this", "is above the other ''' if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1]", "* Returns the approximate direction of the other tile relative", "ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_x) == 0: ''' other", "the OpenGeoTile to check * @return true if tile sizes", "import openlocationcode as olc from enum import Enum import math,", "CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]: '''", "+ 'E' else: ''' 
other tile is left -> neighborTile", "c1 = self.getTileAddress()[i*2 + 1] c2 = otherTile.getTileAddress()[i*2 + 1]", "the whole tile, see {@link #getTileOpenLocationCode()}. * @return the exact", "west ''' direction = direction + 'W' return direction def", "absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\")", "through from * {@link OpenLocationCode#OpenLocationCode(double, double, int)} */''' if not", "size are adjacent if at least one neighbor of the", "//but not the smaller tile itself, is contained within the", "south_exists: south_base += \"2\" + relevant_digit if not west_exists: west_base", "memoized_digit_dict = { \"N1\": NORTH_DIGITS, \"E1\": EAST_DIGITS, \"S1\": SOUTH_DIGITS, \"W1\":", "//360°/20° = 18 if abs(firstDiff) > NUM_CHARACTERS_USED/2: if firstDiff >", "or x[1] in ['2', 'X']} NORTH_DIGITS = {x for x", "quickest_i = i break for i in range(quickest_i, iterations_needed): existing_bases", "have to deal with invalid lat/long values directly''' neighborLatitude =", "def __init__(self, code=None, tile_size=None, lat=None, long=None, ): if not (code", "x 0.05°. The side length of this tile varies with", "pad codes. PADDING_CHARACTER = '0' PADDING_2 = \"00\" PADDING_4 =", "empty string. */''' if self.tile_size == TileSize.GLOBAL: return \"\" else:", "so''' firstDiff = self.characterDistance(c1, c2) NUM_CHARACTERS_USED = 18 #; //360°/20°", "tile size to use for this OpenGeoTile * @throws IllegalArgumentException", "not self.isNeighbor(neighborTile): raise Exception(\"neighborTile must be neighbor\") if neighborTile.getTileSize() !=", "''' An area of 0.000125° x 0.000125°. 
The side length", "* @return the tile address of this OpenGeoTile; */''' return", "firstDiff += NUM_CHARACTERS_USED tileDistance += firstDiff else: tileDistance += self.characterDistance(c1,", "latitude latitude of the location * @param longitude longitude of", "TileSize.PINPOINT self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size = tile_size def", "0, -1, -1, -1, 0] #long_diff = [-1, 0, +1,", "the number of tiles of the given size that need", "{ 'NW': 'X2', 'NE': 'XX', 'SE': '2X', 'SW': '22' }", "return less than 8 neighbors for tiles near the poles.", "a string of length 2, 4, 6, 8, or 10,", "detectedTileSize self.code = olcBuilder.upper() def getWrappedOpenLocationCode(self): # this code is", "don't match\") return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self,", "for y in CODE_ALPHABET} BASE_20_BORDER_SET = {x for x in", "match\") return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self, otherTile): '''/**", "self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() !=", "!= set() west_exists = west_set != set() for base in", "eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses = set() if eight_point_direction is None:", "return self.code def getTileSize(self): '''/** * Get the {@link TileSize}", "SE... should return only one tile''' ordinal_digit_dict = { 'NW':", "south_set != set() west_exists = west_set != set() for base", "[ 0, +1], \"SW\": [-1, -1], \"S\": [-1, 0], \"SE\":", "return {OpenGeoTile(address + base) for base in base_set} elif len(eight_point_direction)", "its location on the globe, but can be up to", "An area of 0.000125° x 0.000125°. 
The side length of", "wrapper around an {@code OpenLocationCode} object, focusing on the area", "if len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT olcBuilder += tileAddress", "else: ''' this list construction keeps directions in the order", "< CODE_ALPHABET.find(other_tile_y): ''' other tile is right -> neighborTile is", "OpenGeoTile * @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(double,", "size ''' return self elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is", "contains B, then B's address has A's address as a", "up to approximately 275m. Tile addresses will be 8 characters", "@return true if the area potentialMember falls within the area", "long, tile_size.getCodeLength()).upper() self.tile_size = tile_size def constructTileFromTileAddress(self, tileAddress): '''/** *", "both are the same; false if not */''' # //if", "ordinal_digit_dict = { 'NW': 'X2', 'NE': 'XX', 'SE': '2X', 'SW':", "'''directions_list included to keep ordered data''' directions_list = [\"NW\", \"N\",", "the area identified by a prefix * of the given", "Ported by scoofy on 08.31.21 ''' def __init__(self, code=None, tile_size=None,", "self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self, otherTile): '''/** * Calculates the Chebyshev", "self.isNeighbor(neighborTile): raise Exception(\"neighborTile must be neighbor\") if neighborTile.getTileSize() != self.getTileSize():", "east_set = memoized_digit_dict.get(f\"E{iterations_needed}\", set()) south_set = memoized_digit_dict.get(f\"S{iterations_needed}\", set()) west_set =", "existing_base in existing_bases for base in memoized_digit_dict.get(f\"{eight_point_direction}1\")} memoized_digit_dict[f\"{eight_point_direction}{i + 2}\"]", "reversed(range(iterations_needed)): if memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\"): quickest_i = i break for", "long)): raise 
Exception(\"Invalid OpenGeoTile constructor arguments\") if lat and long:", "= self.characterDistance(c1, c2) NUM_CHARACTERS_USED = 18 #; //360°/20° = 18", "one * @return an angle in radians, 0 being an", "OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress is of * invalid length", "an integer value corresponding to the number of tiles of", "focusing on the area identified by a prefix * of", "of the same size. * @return an array of the", "base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: quickest_i = 0 for", "A's address as a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile):", "the same size as this one * @return an integer", "relevant_digit + \"2\" if not east_exists: east_set.add(east_base) if not south_exists:", "all neighboring tiles of a given one, to calculate a", "14m. Tile addresses will be 10 characters long.''' PINPOINT =", "OpenLocationCode#OpenLocationCode(double, double, int)} */''' if not tile_size: tile_size = TileSize.PINPOINT", "eight_point_direction=None): '''/** * Get an array of the typically 8", "+= self.characterDistance(c1, c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def", "tiles ''' if CODE_ALPHABET.find(other_tile_y) == 0: ''' other tile is", "same or adjacent * \"tiles\", to determine all neighboring tiles", "otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\") xDiff =", "data''' directions_list = [\"NW\", \"N\", \"NE\", \"E\", \"SE\", \"S\", \"SW\",", "directions in the order above ''' uppercase_input_directions = [d.upper() for", "def is_tile_address(plus_code): return plus_code.find(SEPARATOR) == -1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code", "address_set = return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address) for address in address_set}", "for x in BASE_20_BORDER_SET if 
x[1] == '2'} memoized_digit_dict =", "number of '0' and '+' characters. Example: Address \"CVXW\" corresponds", "self.characterDistance(c1, c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def returnSetOfSubtiles(self,", "to go the other way around, do so''' firstDiff =", "copied from OpenLocationCode.java''' index = \"23456789CFGHJMPQRVWX\".find(c.upper()) if index == -1:", "not south_exists: south_set.add(south_base) if not west_exists: west_set.add(west_base) memoized_digit_dict[f\"E{iterations_needed}\"] = east_set", "relevant_digit = n_tuple[1] if not east_exists: east_base += relevant_digit +", "character used to pad codes. PADDING_CHARACTER = '0' PADDING_2 =", "''' An area of 0.05° x 0.05°. The side length", "''' //OLC constructor clips and normalizes, //so we don't have", "Check if a tile is neighboring this one. * @param", "base_tuple_list: relevant_digit = n_tuple[1] if not east_exists: east_base += relevant_digit", "memoized_digit_dict[f\"S{iterations_needed}\"] = south_set memoized_digit_dict[f\"W{iterations_needed}\"] = west_set all_border_set = north_set |", "smallerTile.getNeighbors() for neighbor in neighbors: if biggerTile.contains(neighbor): return True return", "len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT olcBuilder += tileAddress +", "== TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT olcBuilder += tileAddress + PADDING_2", "of 0.0025° x 0.0025°. The side length of this tile", "determined by the length of this address * @throws IllegalArgumentException", "* number of '0' and '+' characters. 
Example: Address \"CVXW\"", "uppercase_input_directions] neighbors = set() for direction in directions: lat_diff, long_diff", "tile_set = {OpenGeoTile(address) for address in address_set} return tile_set def", "for n_tuple in base_tuple_list: relevant_digit = n_tuple[1] if not east_exists:", "direction in uppercase_input_directions] neighbors = set() for direction in directions:", "None return {address+base for address in set_of_addresses for base in", "base for existing_base in existing_bases for base in memoized_digit_dict.get(f\"{eight_point_direction}1\")} memoized_digit_dict[f\"{eight_point_direction}{i", "20°. The side length of this tile varies with its", "self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code) latitude = code_area.latitudeCenter longitude = code_area.longitudeCenter", "the same size; * may return less than 8 neighbors", "self.getTileSize(): raise Exception(\"Tile sizes don't match\") numIterations = self.tile_size.getCodeLength()/2 #;", "if eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper()) else: ''' this list construction", "memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\") next_set = {existing_base + base for existing_base", "deltas = [20.0, 1.0, 0.05, 0.0025, 0.000125] delta = self.getTileSize().getCoordinateIncrement()", "[+1, 0], \"NE\": [+1, +1], \"W\": [ 0, -1], \"E\":", "latitude + (delta * lat_diff) neighborLongitude = longitude + (delta", "long, tile_size) elif code and tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif code:", "characters */''' return self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None): '''/** * Get", "x in BASE_20_BORDER_SET if x[1] == '2'} memoized_digit_dict = {", "= TileSize.GLOBAL elif code_length == TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION elif", "# A separator used to break the code into two", "by desired_tile_size\") iterations_needed = 
int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set = set()", "''' An area of 20° x 20°. The side length", "iterating over neighbors for same tile''' if self.isSameTile(potentialNeighbor): return False", "plus code for the whole tile, probably padded with '0'", "by desired_tile_size\") iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()])", "* @return a plus code for the whole tile, probably", "address_set = set([self.getTileAddress()]) for i in range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set)", "NEIGHBORHOOD = (8, 0.0025) ''' An area of 0.000125° x", "other tile is right -> neighborTile is east ''' direction", "or str\") if eight_point_direction is None: directions = directions_list elif", "= {existing_base + base for existing_base in existing_bases for base", "base) for base in base_set} elif len(eight_point_direction) == 2: '''", "neighborLatitude = latitude + (delta * lat_diff) neighborLongitude = longitude", "in alphabet\") return index def characterDistance(self, c1, c2): return self.getCharacterIndex(c1)", "return False else: '''//tiles of different size are adjacent if", "{@code OpenLocationCode} object, focusing on the area identified by a", "by the length of this address * @throws IllegalArgumentException passed", "base)} def memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not", "olc.isFull(plus_code): raise Exception(\"Only full OLC supported. 
Use olc.recoverNearest().\") self.code =", "= {x+y for x in CODE_ALPHABET for y in CODE_ALPHABET}", "raise Exception(\"Invalid tile address\") self.tile_size = detectedTileSize self.code = olcBuilder.upper()", "= memoized_digit_dict.get(f\"E{iterations_needed}\", set()) south_set = memoized_digit_dict.get(f\"S{iterations_needed}\", set()) west_set = memoized_digit_dict.get(f\"W{iterations_needed}\",", "= (2, 20.0) ''' An area of 1° x 1°.", "if self.tile_size == TileSize.GLOBAL: return \"\" else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2]", "OpenGeoTile to check * @return true if the area potentialMember", "and tile_size) or (lat and long)): raise Exception(\"Invalid OpenGeoTile constructor", "allowed by desired_tile_size\") code_address = code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength() code", "* (full_length - len(code_address))) if desired_tile_size == TileSize.PINPOINT: code =", "def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) != -1 def is_tile_address(plus_code): return plus_code.find(SEPARATOR)", "tiles that are the same as this one due to", "= self.getTileAddress() if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum", "code = code[:-2] + SEPARATOR + code[-2:] else: code =", "self.tile_size == TileSize.GLOBAL: return \"\" else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def", "bigger tile''' if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile = potentialNeighbor biggerTile", "or far away tiles, so this should only be *", "if is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR, \"\")[0: self.tile_size.getCodeLength()]", "getWrappedOpenLocationCode(self): # this code is effectively redundant as python has", "padding larger than allowed by desired_tile_size\") iterations_needed = 
desired_tile_size.getCodeLength()/2 -", "memoized_digit_dict.get(f\"W{iterations_needed}\", set()) east_exists = east_set != set() south_exists = south_set", "detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder += tileAddress + SEPARATOR if len(tileAddress)", "TileSize} of this OpenGeoTile. * @return the {@link TileSize} of", "# //if A contains B, then B's address has A's", "side length of this tile varies with its location on", "__init__(self, code_length, coordinate_increment): self.code_length = code_length self.coordinate_increment = coordinate_increment def", "a full code */''' if not olc.isFull(plus_code): raise Exception(\"Only full", "= \"0000\" PADDING_6 = \"000000\" CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET =", "if not north_base_set: self.memoizeDigitDict(\"N\", iterations_needed) north_set = memoized_digit_dict.get(f\"N{iterations_needed}\") east_set =", "= TileSize.NEIGHBORHOOD elif code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT else:", "above the other ''' if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and", "== desired_tile_size.getCodeLength(): ''' tile is desired size ''' return self", "modified_plus_code = return_code_of_tile_size(plus_code, tile_size) self.code = modified_plus_code.upper() self.tile_size = tile_size", "SOUTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] ==", "this tile. Other than * {@link #getWrappedOpenLocationCode()}, this will return", "can be up to approximately 275m. Tile addresses will be", "2d west d2''' for n_tuple in base_tuple_list: relevant_digit = n_tuple[1]", "''' An area of 0.0025° x 0.0025°. The side length", "type list or str\") if eight_point_direction is None: directions =", "@return the exact plus code wrapped by this OpenGeoTile */'''", "size. 
* @return an array of the typically 8 neighboring", "float, long: float, tile_size=None): '''/** * Creates a new OpenGeoTile", "TileSize.GLOBAL olcBuilder += tileAddress + PADDING_6 + SEPARATOR if len(tileAddress)", "to the other tile * @throws IllegalArgumentException thrown if otherTile", "be up to approximately 5.5km. Tile addresses will be 6", "def return_set_of_subaddresses(set_of_addresses): for address in set_of_addresses: if len(address) == TileSize.PINPOINT.getCodeLength():", "''' ''' let's do it once, and try to reduce", "being an eastward direction, +/- PI being westward direction *", "0.0025, 0.000125] delta = self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code) latitude =", "or thrown if tileAddress is of * invalid length */'''", "OpenGeoTile constructor arguments\") if lat and long: self.constructTileFromLatLong(lat, long, tile_size)", "the other tile * @throws IllegalArgumentException thrown if otherTile has", "this list construction keeps directions in the order above '''", "* have a large margin of error, especially for big", "+1, 0, -1, -1] if not type(eight_point_direction) in [type(None), list,", "is None: ''' all borders ''' ''' traveling salesman problem", "''' An area of 1° x 1°. The side length", "to pad codes. PADDING_CHARACTER = '0' PADDING_2 = \"00\" PADDING_4", "sizes don't match\") numIterations = self.tile_size.getCodeLength()/2 #; //1..5 tileDistance =", "0.000125] delta = self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code) latitude = code_area.latitudeCenter", "iterations_needed = int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set = set() east_set =", "contains another one. * @param potentialMember the OpenGeoTile to check", "if firstDiff > 0: firstDiff -= NUM_CHARACTERS_USED else: firstDiff +=", "class TileSize(Enum): ''' An area of 20° x 20°. 
The", "in BASE_20_SET if x[0] in ['2', 'X'] or x[1] in", "iterations_needed): base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: quickest_i = 0", "https://github.com/google/open-location-code * * @author <NAME> * @version 0.1.0 */ Ported", "if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile = potentialNeighbor biggerTile = self", "d2''' for n_tuple in base_tuple_list: relevant_digit = n_tuple[1] if not", "string of length 2, 4, 6, 8, or 10, which", "address has A's address as a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def", "tile_size def constructTileFromLatLong(self, lat: float, long: float, tile_size=None): '''/** *", "for i in reversed(range(iterations_needed)): if memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\"): quickest_i =", "tile''' ordinal_digit_dict = { 'NW': 'X2', 'NE': 'XX', 'SE': '2X',", "= TileSize.PINPOINT else: raise Exception(\"Too precise, sort this later\") def", "''' /** * A wrapper around an {@code OpenLocationCode} object,", "tile_size) or (lat and long)): raise Exception(\"Invalid OpenGeoTile constructor arguments\")", "for i in range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address)", "corresponds to OLC \"CVXW0000+\" * @return the tile address of", "take care of wrapping - basically, //if it's shorter to", "false if not */''' if potentialSameTile.getTileSize() != self.getTileSize(): return False", "'22' } base = '' for i in range(iterations_needed): base", "area of 0.0025° x 0.0025°. The side length of this", "{@link com.google.openlocationcode.OpenLocationCode} for this tile. 
Other than * {@link #getWrappedOpenLocationCode()},", "return direction def getCharacterIndex(self, c): '''//following definitions copied from OpenLocationCode.java'''", "Open Location Code is a technology developed by Google and", "address in address_set} return tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address", "long.''' PINPOINT = (10, 0.000125) def __init__(self, code_length, coordinate_increment): self.code_length", "= otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2) if absolute_value_bool: return abs(tileDistance) return", "Check if this tile contains another one. * @param potentialMember", "'0' and '+' characters. Example: Address \"CVXW\" corresponds to OLC", "self.constructTileFromCodeAndSize(code, tile_size) elif code: if is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code) self.tile_address", "if OLC has too much padding for given tile_size '''", "''' address already minimum possible size ''' return None elif", "* Using this wrapper class allows to determine whether two", "*/''' if self.tile_size == TileSize.GLOBAL: return \"\" else: return self.getTileAddress()[0:", "False neighbors = self.getNeighbors() for neighbor in neighbors: if potentialNeighbor.isSameTile(neighbor):", "for the current location @param tile_size tile size to use", "code and tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif code: if is_tile_address(code): self.constructTileFromTileAddress(code)", "1}\") next_set = {existing_base + base for existing_base in existing_bases", "== TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD elif code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size", "all_border_set} elif len(eight_point_direction) == 1: ''' North, South, East, or", "desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): ''' 
tile is desired size", "need to * be traversed getting from one to the", "tile's address with the final two characters removed. In case", "CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if", "'''/** * Creates a new OpenGeoTile from lat/long coordinates. *", "if not olc.isFull(plus_code): raise Exception(\"Only full OLC supported. Use olc.recoverNearest().\")", "error, especially for big or far away tiles, so this", "set_of_addresses: if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible", "self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise", "prefix * of the given OpenLocationCode. * * Using this", "== TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT else: raise Exception(\"Too precise, sort", "tileDistance += self.characterDistance(c1,c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def", "False else: '''//tiles of different size are adjacent if at", "+ base) for base in base_set} elif len(eight_point_direction) == 2:", "x in BASE_20_SET if x[0] in ['2', 'X'] or x[1]", "long: self.constructTileFromLatLong(lat, long, tile_size) elif code and tile_size: self.constructTileFromCodeAndSize(code, tile_size)", "of the smaller tile, //but not the smaller tile itself,", "+= self.characterDistance(c1,c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def getLongitudinalTileDistance(self,", "digits ''' all_border_set = memoized_digit_dict.get(f\"A{iterations_needed}\") if not all_border_set: north_base_set =", "directions_list elif isinstance(eight_point_direction, str): directions = [] if eight_point_direction.upper() in", "'''get 0th value''' return self.code_length def getCoordinateIncrement(self): '''get 1th value'''", "possible size ''' return None elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 
'desired_tile_size", "= (8, 0.0025) ''' An area of 0.000125° x 0.000125°.", "exist in alphabet\") return index def characterDistance(self, c1, c2): return", "the given OpenLocationCode. * * Using this wrapper class allows", "olc from enum import Enum import math, re class TileSize(Enum):", "if len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION olcBuilder += tileAddress", "= set() if isinstance(eight_point_direction, str): eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses =", "relevant_digit + \"X\" if not south_exists: south_base += \"2\" +", "raise Exception(\"Too precise, sort this later\") def constructTileFromCodeAndSize(self, plus_code, tile_size):", "care of wrapping - basically, //if it's shorter to go", "BASE_20_BORDER_SET if x[0] == '2'} WEST_DIGITS = {x for x", "+ 'S' else: if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): ''' other tile", "self_tile_y = self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1] direction", "Returns the approximate direction of the other tile relative to", "neighborTile is north ''' direction = direction + 'N' else:", "== TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL olcBuilder += tileAddress + PADDING_6", "-1 def is_tile_address(plus_code): return plus_code.find(SEPARATOR) == -1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size):", "getChebyshevTileDistanceTo(self, otherTile): '''/** * Calculates the Chebyshev (chessboard) distance between", "other tile is above -> neighborTile is north ''' direction", "= [+1, +1, +1, 0, -1, -1, -1, 0] #long_diff", "self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return self.getTileAddressPrefix() def getTileOpenLocationCode(self): # this", "NUM_CHARACTERS_USED else: firstDiff += NUM_CHARACTERS_USED tileDistance += firstDiff else: tileDistance", "return 
{OpenGeoTile(address+base) for base in all_border_set} elif len(eight_point_direction) == 1:", "OpenGeoTile from an existing {@link com.google.openlocationcode.OpenLocationCode}. @param olc OpenLocationCode for", "OpenLocationCode for the current location. This can be a padded", "long.''' GLOBAL = (2, 20.0) ''' An area of 1°", "to approximately 2200km. Tile addresses will be 2 characters long.'''", "and long: self.constructTileFromLatLong(lat, long, tile_size) elif code and tile_size: self.constructTileFromCodeAndSize(code,", "the next biggest tile at this location. * @return this", "= smallerTile.getNeighbors() for neighbor in neighbors: if biggerTile.contains(neighbor): return True", "return self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None): '''/** * Get an array", "as this one * @return an angle in radians, 0", "def getCharacterIndex(self, c): '''//following definitions copied from OpenLocationCode.java''' index =", "0.05°. The side length of this tile varies with its", "code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT else: raise Exception(\"Too precise,", "match\") xDiff = int(self.getLongitudinalTileDistance(otherTile, False)) yDiff = int(self.getLatitudinalTileDistance(otherTile, False)) return", "tile areas ''' if not self.isNeighbor(neighborTile): raise Exception(\"neighborTile must be", "and potentialNeighbor are adjacent (8-neighborhood); * false if not */'''", "neighbors: if potentialNeighbor.isSameTile(neighbor): return True return False else: '''//tiles of", "def memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set:", "+= tileAddress + PADDING_2 + SEPARATOR if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength():", "base_set: quickest_i = 0 for i in reversed(range(iterations_needed)): if memoized_digit_dict.get(f\"{eight_point_direction}{i", "this OpenGeoTile */''' 
return self.tile_size def getTileAddress(self): '''/** * A", "invalid lat/long values directly''' neighborLatitude = latitude + (delta *", "OpenLocationCode.java # The character used to pad codes. PADDING_CHARACTER =", "order above ''' uppercase_input_directions = [d.upper() for d in eight_point_direction]", "0: '''//for the first longitudinal value, we need to take", "range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address) for address in", "salesman problem ''' ''' let's do it once, and try", "code[:-2] + SEPARATOR + code[-2:] else: code = code[:-2] +", "if not west_exists: west_set.add(west_base) memoized_digit_dict[f\"E{iterations_needed}\"] = east_set memoized_digit_dict[f\"S{iterations_needed}\"] = south_set", "is west ''' direction = direction + 'W' return direction", "len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION olcBuilder += tileAddress +", "c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT):", "== TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT elif code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size", "+ 'S' if self_tile_y != other_tile_y: ''' one tile is", "[+1, +1], \"W\": [ 0, -1], \"E\": [ 0, +1],", "com.google.openlocationcode.OpenLocationCode} after removing * '+' and an additional number of", "* \"tiles\", to determine all neighboring tiles of a given", "0.0025° x 0.0025°. 
The side length of this tile varies", "= TileSize.DISTRICT olcBuilder += tileAddress + PADDING_2 + SEPARATOR if", "+ SEPARATOR return code def return_set_of_subaddresses(set_of_addresses): for address in set_of_addresses:", "code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD elif code_length == TileSize.PINPOINT.getCodeLength():", "+ SEPARATOR if len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION olcBuilder", "characters long.''' REGION = (4, 1.0) ''' An area of", "openlocationcode import openlocationcode as olc from enum import Enum import", "(\"0\" * (full_length - len(code_address))) if desired_tile_size == TileSize.PINPOINT: code", "tileDistance += firstDiff else: tileDistance += self.characterDistance(c1, c2) if absolute_value_bool:", "direction = direction + 'W' else: if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y):", "padded with '0' characters */''' return self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None):", "including cases * where both are the same; false if", "Code is a technology developed by Google and licensed under", "returns the empty string. */''' if self.tile_size == TileSize.GLOBAL: return", "* long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile):", "the Chebyshev (chessboard) distance between this and another tile of", "OpenLocationCode.java''' index = \"23456789CFGHJMPQRVWX\".find(c.upper()) if index == -1: raise Exception(\"Character", "import math, re class TileSize(Enum): ''' An area of 20°", "olcBuilder += tileAddress + PADDING_2 + SEPARATOR if len(tileAddress) ==", "or if OLC has too much padding for given tile_size", "given OpenLocationCode. 
* * Using this wrapper class allows to", "!= -1 def is_tile_address(plus_code): return plus_code.find(SEPARATOR) == -1 def return_code_of_tile_size(too_precise_plus_code,", "pass a short (non-full) OLC, or if OLC has too", "= direction_dict.get(direction) ''' //OLC constructor clips and normalizes, //so we", "max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self, otherTile): '''/** * Returns", "for i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2]", "for i in range(quickest_i, iterations_needed): existing_bases = memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\")", "for base in north_set: east_base = \"\" south_base = \"\"", "Tile addresses will be 4 characters long.''' REGION = (4,", "tile of the same size as this one * @return", "+ 'N' else: direction = direction + 'S' else: if", "else: ''' other tile is below -> neighborTile is south", "!= self.getTileSize(): raise Exception(\"Tile sizes don't match\") return max(self.getLatitudinalTileDistance(otherTile, True),", "not east_exists: east_set.add(east_base) if not south_exists: south_set.add(south_base) if not west_exists:", "Creates a new OpenGeoTile from lat/long coordinates. * @param latitude", "* returns the empty string. */''' if self.tile_size == TileSize.GLOBAL:", "# this code is redundant '''/** * The full {@link", "elif code: if is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR,", "''' North, South, East, or West ''' base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\")", "''' all borders ''' ''' traveling salesman problem ''' '''", "code */''' if not olc.isFull(plus_code): raise Exception(\"Only full OLC supported.", "as this one. 
* @param potentialSameTile the OpenGeoTile to check", "0 for i in range(int(numIterations)): tileDistance *= 20 c1 =", "= self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1] direction =", "code wrapped by this OpenGeoTile */''' return self.code def returnCode(self):", "up to approximately 5.5km. Tile addresses will be 6 characters", "- self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize():", "address = self.getTileAddress() if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already", "(lat and long)): raise Exception(\"Invalid OpenGeoTile constructor arguments\") if lat", "lat_diff) neighborLongitude = longitude + (delta * long_diff) new_OpenGeoTile =", "a new OpenGeoTile from an existing * {@link com.google.openlocationcode.OpenLocationCode}. *", "to OLC \"CVXW0000+\" * @return the tile address of this", "definitions copied from OpenLocationCode.java''' index = \"23456789CFGHJMPQRVWX\".find(c.upper()) if index ==", "tile, including cases * where both are the same; false", "this OpenGeoTile @throws IllegalArgumentException when trying to pass a short", "this location. * @return this tile's address with the final", "will be 6 characters long.''' DISTRICT = (6, 0.05) '''", "abs(tileDistance) return tileDistance def getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() !=", "if the area potentialMember falls within the area of this", "raise Exception(\"neighborTile must be neighbor\") if neighborTile.getTileSize() != self.getTileSize(): raise", "An area of 0.0025° x 0.0025°. 
The side length of", "directions = [] if eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper()) else: '''", "len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size '''", "code = code_address + (\"0\" * (full_length - len(code_address))) if", "with '0' characters */''' return self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None): '''/**", "\"N\", \"NE\", \"E\", \"SE\", \"S\", \"SW\", \"W\"] direction_dict = {", "direction_dict = { \"NW\": [+1, -1], \"N\": [+1, 0], \"NE\":", "= otherTile.getTileAddress()[i*2 + 1] if i == 0: '''//for the", "code[:-2] + SEPARATOR return code def return_set_of_subaddresses(set_of_addresses): for address in", "North, South, East, or West ''' base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if", "if direction in uppercase_input_directions] neighbors = set() for direction in", "== TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size ''' return", "a tile is neighboring this one. * @param potentialNeighbor the", "return \"\" else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return self.getTileAddressPrefix()", "all_border_set = north_set | east_set | south_set | west_set memoized_digit_dict[f\"A{iterations_needed}\"]", "have a large margin of error, especially for big or", "BASE_20_SET} class OpenGeoTile(): ''' /** * A wrapper around an", "olc.isFull(plus_code): raise Exception(\"Only full OLC supported. 
Use recover().\") modified_plus_code =", "'XX', 'SE': '2X', 'SW': '22' } base = '' for", "= direction + 'W' return direction def getCharacterIndex(self, c): '''//following", "= int(self.getLongitudinalTileDistance(otherTile, False)) yDiff = int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff, xDiff)", "= \"23456789CFGHJMPQRVWX\".find(c.upper()) if index == -1: raise Exception(\"Character does not", "def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code = too_precise_plus_code if not is_tile_address(code): code", "raise Exception(\"eight_point_direction must be of type list or str\") if", "should return only one tile''' ordinal_digit_dict = { 'NW': 'X2',", "\"W\"] direction_dict = { \"NW\": [+1, -1], \"N\": [+1, 0],", "tiles etc. * * Open Location Code is a technology", "has too much padding for given tile_size ''' if not", "def constructTileFromTileAddress(self, tileAddress): '''/** * Creates a new OpenGeoTile from", "tile, probably padded with '0' characters */''' return self.getWrappedOpenLocationCode() def", "direction + 'N' else: direction = direction + 'S' else:", "of 20° x 20°. The side length of this tile", "up to approximately 110km. Tile addresses will be 4 characters", "self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None): '''/** * Get an array of", "the same as this one due to clipping near the", "*/''' # //if A contains B, then B's address has", "the current location @param tile_size tile size to use for", "'''/** * The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile.", "neighbors for tiles near the poles. 
*/''' # deltas =", "-> neighborTile is west ''' direction = direction + 'W'", "to break the code into two parts to aid memorability.", "near the poles''' neighbors.add(new_OpenGeoTile) return neighbors def isSameTile(self, potentialSameTile): '''/**", "} #lat_diff = [+1, +1, +1, 0, -1, -1, -1,", "The return value can * have a large margin of", "* where both are the same; false if not */'''", "self.constructTileFromLatLong(lat, long, tile_size) elif code and tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif", "or West ''' base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: self.memoizeDigitDict(eight_point_direction,", "the globe, but can be up to approximately 275m. Tile", "True) + self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self, otherTile): '''/** * Calculates", "raise Exception(\"Tile sizes don't match\") numIterations = self.tile_size.getCodeLength()/2 #1..5 tileDistance", "direction = \"\" north_south = None if self_tile_x != other_tile_x:", "returnCode(self): return self.code def getTileSize(self): '''/** * Get the {@link", "* false if not */''' if potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid", "is below -> neighborTile is south ''' direction = direction", "[ 0, -1], \"E\": [ 0, +1], \"SW\": [-1, -1],", "= memoized_digit_dict.get(f\"W{iterations_needed}\", set()) east_exists = east_set != set() south_exists =", "+= tileAddress + PADDING_6 + SEPARATOR if len(tileAddress) == TileSize.REGION.getCodeLength():", "same area as this one. * @param potentialSameTile the OpenGeoTile", "Exception(\"Character does not exist in alphabet\") return index def characterDistance(self,", "the same area as this one. 
* @param potentialSameTile the", "* * Open Location Code is a technology developed by", "not */''' if potentialSameTile.getTileSize() != self.getTileSize(): return False return potentialSameTile.getTileAddress()", "+1, +1, +1, 0, -1, -1] if not type(eight_point_direction) in", "one due to clipping near the poles''' neighbors.add(new_OpenGeoTile) return neighbors", "!= self.getTileSize(): return False return potentialSameTile.getTileAddress() == self.getTileAddress() def isNeighbor(self,", "+= tileAddress[0:8] + SEPARATOR + tileAddress[8:10] if detectedTileSize == None:", "direction, +/- PI being westward direction * @throws IllegalArgumentException thrown", "'''/** * Check if a tile is neighboring this one.", "return self.tile_size def getTileAddress(self): '''/** * A tile address is", "direction = direction + 'W' return direction def getCharacterIndex(self, c):", "a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile): '''/** * Calculates", "CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): ''' other tile is right -> neighborTile", "licensed under the Apache License 2.0. * For more information,", "alphabet\") return index def characterDistance(self, c1, c2): return self.getCharacterIndex(c1) -", "The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile. * For", "east_set = set() south_set = set() west_set = set() if", "return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self, otherTile): '''/**", "an existing {@link com.google.openlocationcode.OpenLocationCode}. @param olc OpenLocationCode for the current", "globe, but can be up to approximately 5.5km. Tile addresses", "tileDistance += self.characterDistance(c1, c2) if absolute_value_bool: return abs(tileDistance) return tileDistance", "whole tile, see {@link #getTileOpenLocationCode()}. 
* @return the exact plus", "for direction in directions_list if direction in uppercase_input_directions] neighbors =", "if self.isSameTile(potentialNeighbor): return False neighbors = self.getNeighbors() for neighbor in", "* @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(String)} or", "padding for given tile_size ''' if not olc.isFull(plus_code): raise Exception(\"Only", "neighborTile): ''' returns neighbor's direction, to assist in expanding tile", "if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() !=", "0, -1], \"E\": [ 0, +1], \"SW\": [-1, -1], \"S\":", "def contains(self, potentialMember): '''/** * Check if this tile contains", "to pass a short (non-full) OLC, or if OLC has", "com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile. * For the plus code", "otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\") return max(self.getLatitudinalTileDistance(otherTile,", "identified by a prefix * of the given OpenLocationCode. *", "CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if", "not the smaller tile itself, is contained within the bigger", "code is redundant '''/** * The full {@link com.google.openlocationcode.OpenLocationCode} for", "plus code wrapped by this OpenGeoTile */''' return self.code def", "prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile): '''/** * Calculates the", "is south ''' direction = direction + 'S' if self_tile_y", "can be up to approximately 14m. 
Tile addresses will be", "PADDING_2 + SEPARATOR if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD", "[20.0, 1.0, 0.05, 0.0025, 0.000125] delta = self.getTileSize().getCoordinateIncrement() code_area =", "neighborTile is west ''' direction = direction + 'W' return", "- self.tile_size.getCodeLength()/2) north_set = set() east_set = set() south_set =", "memoized_digit_dict.get(f\"N{iterations_needed}\") if not north_base_set: self.memoizeDigitDict(\"N\", iterations_needed) north_set = memoized_digit_dict.get(f\"N{iterations_needed}\") east_set", "don't match\") self_tile_x = self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1] other_tile_x =", "TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder += tileAddress + SEPARATOR if", "return abs(tileDistance) return tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() ==", "\"2\" + relevant_digit if not west_exists: west_base += relevant_digit +", "return self.coordinate_increment # Copy from OpenLocationCode.java # A separator used", "in BASE_20_BORDER_SET if x[1] == '2'} memoized_digit_dict = { \"N1\":", "neighborTile is west ''' direction = direction + 'W' else:", "''' north will be Xd east dX south 2d west", "not east_exists: east_base += relevant_digit + \"X\" if not south_exists:", "this tile, including cases * where both are the same;", "*/''' if potentialSameTile.getTileSize() != self.getTileSize(): return False return potentialSameTile.getTileAddress() ==", "address is a string of length 2, 4, 6, 8,", "typically 8 neighboring tiles of the same size; * may", "{x for x in BASE_20_BORDER_SET if x[1] == 'X'} SOUTH_DIGITS", "Exception(\"Too precise, sort this later\") def constructTileFromCodeAndSize(self, plus_code, tile_size): '''", "west_exists: west_set.add(west_base) memoized_digit_dict[f\"E{iterations_needed}\"] = east_set 
memoized_digit_dict[f\"S{iterations_needed}\"] = south_set memoized_digit_dict[f\"W{iterations_needed}\"] =", "DISTRICT = (6, 0.05) ''' An area of 0.0025° x", "#1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance *=", "index == -1: raise Exception(\"Character does not exist in alphabet\")", "= east_set memoized_digit_dict[f\"S{iterations_needed}\"] = south_set memoized_digit_dict[f\"W{iterations_needed}\"] = west_set all_border_set =", "4 characters long.''' REGION = (4, 1.0) ''' An area", "this code is redundant '''/** * The full {@link com.google.openlocationcode.OpenLocationCode}", "code = code[:-2] + SEPARATOR return code def return_set_of_subaddresses(set_of_addresses): for", "on the globe, but can be up to approximately 5.5km.", "*/''' return self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None): '''/** * Get an", "'''/** * Get an array of the typically 8 neighboring", "'W' return direction def getCharacterIndex(self, c): '''//following definitions copied from", "is the address of the next biggest tile at this", "OpenGeoTile @throws IllegalArgumentException when trying to pass a short (non-full)", "north ''' direction = direction + 'N' else: ''' other", "+ 1}\") next_set = {existing_base + base for existing_base in", "= code[:-2] + SEPARATOR return code def return_set_of_subaddresses(set_of_addresses): for address", "''' if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0,", "OpenGeoTile from a tile address. * @param tileAddress a tile", "0: firstDiff -= NUM_CHARACTERS_USED else: firstDiff += NUM_CHARACTERS_USED tileDistance +=", "* valid {@link com.google.openlocationcode.OpenLocationCode} after removing * '+' and an", "< CODE_ALPHABET.find(other_tile_x): ''' other tile is above -> neighborTile is", "elif code_length == TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION elif code_length ==", "from lat/long coordinates. 
* @param latitude latitude of the location", "return True return False else: '''//tiles of different size are", "'2X', 'SW': '22' } base = '' for i in", "full {@link com.google.openlocationcode.OpenLocationCode} for this tile. Other than * {@link", "west d2''' for n_tuple in base_tuple_list: relevant_digit = n_tuple[1] if", "angle in radians, 0 being an eastward direction, +/- PI", "memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: quickest_i = 0 for i in", "0: ''' other tile is above -> neighborTile is north", "other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1] direction = \"\" north_south", "memoized_digit_dict.get(f\"N{iterations_needed}\") east_set = memoized_digit_dict.get(f\"E{iterations_needed}\", set()) south_set = memoized_digit_dict.get(f\"S{iterations_needed}\", set()) west_set", "otherTile): '''/** * Calculates the Manhattan (city block) distance between", "* @param otherTile another tile of the same size as", "self.memoizeDigitDict(\"N\", iterations_needed) north_set = memoized_digit_dict.get(f\"N{iterations_needed}\") east_set = memoized_digit_dict.get(f\"E{iterations_needed}\", set()) south_set", "wrapped by this OpenGeoTile. 
* For the plus code of", "For the plus code of the whole tile, see {@link", "north_base_set: self.memoizeDigitDict(\"N\", iterations_needed) north_set = memoized_digit_dict.get(f\"N{iterations_needed}\") east_set = memoized_digit_dict.get(f\"E{iterations_needed}\", set())", "big' raise Exception(\"OLC padding larger than allowed by desired_tile_size\") iterations_needed", "is too big' raise Exception(\"OLC padding larger than allowed by", "tile_size ''' if not olc.isFull(plus_code): raise Exception(\"Only full OLC supported.", "self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return self.getTileAddressPrefix() def getTileOpenLocationCode(self): # this code", "self.code def returnCode(self): return self.code def getTileSize(self): '''/** * Get", "tile_size) elif code and tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif code: if", "elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception(\"OLC", "direction = direction + 'N' else: ''' other tile is", "this OpenGeoTile. * @return the {@link TileSize} of this OpenGeoTile", "set() south_set = set() west_set = set() if isinstance(eight_point_direction, str):", "memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\"): quickest_i = i break for i in", "size as this one * @return an angle in radians,", "* {@link com.google.openlocationcode.OpenLocationCode} after padding with an appropriate * number", "@return this tile's address with the final two characters removed.", "0 for i in reversed(range(iterations_needed)): if memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\"): quickest_i", "int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set = set() east_set = set() south_set", "Exception(\"Tile sizes don't match\") numIterations = self.tile_size.getCodeLength()/2 #1..5 tileDistance =", "# The character used to pad codes. 
PADDING_CHARACTER = '0'", "and normalizes, //so we don't have to deal with invalid", "* Check if this tile contains another one. * @param", "//so we don't have to deal with invalid lat/long values", "18 #; //360°/20° = 18 if abs(firstDiff) > NUM_CHARACTERS_USED/2: if", "@param tile_size tile size to use for this OpenGeoTile @throws", "in directions_list: directions.append(eight_point_direction.upper()) else: ''' this list construction keeps directions", "len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_x) == 0:", "1th value''' return self.coordinate_increment # Copy from OpenLocationCode.java # A", "east dX south 2d west d2''' for n_tuple in base_tuple_list:", "potentialMember falls within the area of this tile, including cases", "size. * @param otherTile another tile of the same size", "if not south_exists: south_base += \"2\" + relevant_digit if not", "within the bigger tile''' if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile =", "west ''' direction = direction + 'W' else: if CODE_ALPHABET.find(self_tile_y)", "to aid memorability. SEPARATOR = '+' # Copy from OpenLocationCode.java", "a GLOBAL tile, * returns the empty string. */''' if", "false if not */''' # //if A contains B, then", "= coordinate_increment def getCodeLength(self): '''get 0th value''' return self.code_length def", "plus_code, tile_size): ''' Creates a new OpenGeoTile from an existing", "'''/** * Returns the approximate direction of the other tile", "set() south_exists = south_set != set() west_exists = west_set !=", "to approximately 5.5km. 
Tile addresses will be 6 characters long.'''", "coordinate_increment): self.code_length = code_length self.coordinate_increment = coordinate_increment def getCodeLength(self): '''get", "a [2/4/6/8/10]-character string that corresponds to a * valid {@link", "+ SEPARATOR if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder", "TileSize(Enum): ''' An area of 20° x 20°. The side", "for x in BASE_20_BORDER_SET if x[0] == 'X'} EAST_DIGITS =", "if neighborTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\") self_tile_x", "== None: print(tileAddress) raise Exception(\"Invalid tile address\") self.tile_size = detectedTileSize", "isNeighbor(self, potentialNeighbor): '''/** * Check if a tile is neighboring", "= set([self.getTileAddress()]) for i in range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set) tile_set", "longitudinal value, we need to take care of wrapping -", "potentialNeighbor if biggerTile.contains(smallerTile): return False neighbors = smallerTile.getNeighbors() for neighbor", "removing * '+' and an additional number of trailing characters;", "poles. */''' # deltas = [20.0, 1.0, 0.05, 0.0025, 0.000125]", "//OLC constructor clips and normalizes, //so we don't have to", "*/''' # deltas = [20.0, 1.0, 0.05, 0.0025, 0.000125] delta", "this. 
The return value can * have a large margin", "\"00\" PADDING_4 = \"0000\" PADDING_6 = \"000000\" CODE_ALPHABET = olc.CODE_ALPHABET_", "= east_set != set() south_exists = south_set != set() west_exists", "the typically 8 neighboring tiles of the same size; *", "= neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1] direction = \"\" north_south =", "str]: raise Exception(\"eight_point_direction must be of type list or str\")", "is contained within the bigger tile''' if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength():", "CODE_ALPHABET for y in CODE_ALPHABET} BASE_20_BORDER_SET = {x for x", "potentialNeighbor are adjacent (8-neighborhood); * false if not */''' if", "self.getTileAddressPrefix() def getTileOpenLocationCode(self): # this code is redundant '''/** *", "lat/long values directly''' neighborLatitude = latitude + (delta * lat_diff)", "the smaller tile itself, is contained within the bigger tile'''", "code for the whole tile. 
* @return a plus code", "= plus_code.find(PADDING_CHARACTER) else: code_length = min(len(plus_code)-1, 10) if code_length ==", "in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2] c2 =", "otherTile.getTileAddress()[i*2 + 1] if i == 0: '''//for the first", "are the same; false if not */''' # //if A", "1.0, 0.05, 0.0025, 0.000125] delta = self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code)", "is of * invalid length */''' detectedTileSize = None olcBuilder", "address in set_of_addresses: if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already", "TileSize.REGION elif code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT elif code_length", "= direction + 'S' if self_tile_y != other_tile_y: ''' one", "address already minimum possible size ''' return None elif self.tile_size.getCodeLength()", "size is * determined by the length of this address", "long.''' DISTRICT = (6, 0.05) ''' An area of 0.0025°", "= \"\" base_tuple_list = re.findall('..', base) ''' north will be", "for base in all_border_set} elif len(eight_point_direction) == 1: ''' North,", "= north_set | east_set | south_set | west_set memoized_digit_dict[f\"A{iterations_needed}\"] =", "too_precise_plus_code if not is_tile_address(code): code = code.replace(SEPARATOR, '') if is_padded(code):", "tile_size=None): '''/** * Creates a new OpenGeoTile from lat/long coordinates.", "by this OpenGeoTile. 
* For the plus code of the", "if isinstance(eight_point_direction, str): eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses = set() if", "if self_tile_x != other_tile_x: ''' one tile is above the", "* @return the exact plus code wrapped by this OpenGeoTile", "\"tiles\", to determine all neighboring tiles of a given one,", "self_tile_x = self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y", "self.code def getTileSize(self): '''/** * Get the {@link TileSize} of", "OLC has too much padding for given tile_size ''' if", "'S' if self_tile_y != other_tile_y: ''' one tile is above", "* {@link OpenLocationCode#OpenLocationCode(double, double, int)} */''' if not tile_size: tile_size", "i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2] c2", "= self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code) latitude = code_area.latitudeCenter longitude =", "n_tuple in base_tuple_list: relevant_digit = n_tuple[1] if not east_exists: east_base", "* @return this tile's address with the final two characters", "self.code_length = code_length self.coordinate_increment = coordinate_increment def getCodeLength(self): '''get 0th", "self.code.replace(SEPARATOR, \"\")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code): '''/** * Creates a", "assist in expanding tile areas ''' if not self.isNeighbor(neighborTile): raise", "Manhattan (city block) distance between this and another tile of", "+= tileAddress + SEPARATOR if len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize =", "approximately 110km. 
Tile addresses will be 4 characters long.''' REGION", "cases * where both are the same; false if not", "address\") self.tile_size = detectedTileSize self.code = olcBuilder.upper() def getWrappedOpenLocationCode(self): #", "self.tile_size.getCodeLength(): smallerTile = potentialNeighbor biggerTile = self else: smallerTile =", "contains(self, potentialMember): '''/** * Check if this tile contains another", "the poles''' neighbors.add(new_OpenGeoTile) return neighbors def isSameTile(self, potentialSameTile): '''/** *", "/** * A wrapper around an {@code OpenLocationCode} object, focusing", "-1], \"N\": [+1, 0], \"NE\": [+1, +1], \"W\": [ 0,", "code_length, coordinate_increment): self.code_length = code_length self.coordinate_increment = coordinate_increment def getCodeLength(self):", "2.0. * For more information, see https://github.com/google/open-location-code * * @author", "the same size as this one * @return an angle", "olc.decode(self.code) latitude = code_area.latitudeCenter longitude = code_area.longitudeCenter '''directions_list included to", "west_set = memoized_digit_dict.get(f\"W{iterations_needed}\", set()) east_exists = east_set != set() south_exists", "= memoized_digit_dict.get(f\"S{iterations_needed}\", set()) west_set = memoized_digit_dict.get(f\"W{iterations_needed}\", set()) east_exists = east_set", "olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size = tile_size def constructTileFromTileAddress(self, tileAddress): '''/**", "SEPARATOR if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder +=", "tile_size def constructTileFromTileAddress(self, tileAddress): '''/** * Creates a new OpenGeoTile", "the {@link TileSize} of this OpenGeoTile */''' return self.tile_size def", "for x in CODE_ALPHABET for y in CODE_ALPHABET} BASE_20_BORDER_SET =", "different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile", "direction + 'E' else: ''' 
other tile is left ->", "'''/** * The prefix of a tile address is the", "= { 'NW': 'X2', 'NE': 'XX', 'SE': '2X', 'SW': '22'", "self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()]) for i in range(int(iterations_needed)): address_set =", "direction = direction + 'N' else: direction = direction +", "firstDiff -= NUM_CHARACTERS_USED else: firstDiff += NUM_CHARACTERS_USED tileDistance += firstDiff", "*/''' if not olc.isFull(plus_code): raise Exception(\"Only full OLC supported. Use", "com.google.openlocationcode.OpenLocationCode}. @param olc OpenLocationCode for the current location @param tile_size", "may return less than 8 neighbors for tiles near the", "return False def contains(self, potentialMember): '''/** * Check if this", "other ''' if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in", "characters long.''' NEIGHBORHOOD = (8, 0.0025) ''' An area of", "firstDiff else: tileDistance += self.characterDistance(c1, c2) if absolute_value_bool: return abs(tileDistance)", "iterations_needed) north_set = memoized_digit_dict.get(f\"N{iterations_needed}\") east_set = memoized_digit_dict.get(f\"E{iterations_needed}\", set()) south_set =", "self biggerTile = potentialNeighbor if biggerTile.contains(smallerTile): return False neighbors =", "olc.CODE_ALPHABET_ BASE_20_SET = {x+y for x in CODE_ALPHABET for y", "tile is left -> neighborTile is west ''' direction =", "+= ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address + base)} def memoizeDigitDict(self, eight_point_direction, iterations_needed):", "* may return less than 8 neighbors for tiles near", "don't match\") return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self, otherTile):", "\"W1\": WEST_DIGITS, } def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) != -1 def", "else: tileDistance += self.characterDistance(c1, c2) if 
absolute_value_bool: return abs(tileDistance) return", "Tile addresses will be 8 characters long.''' NEIGHBORHOOD = (8,", "whole tile. * @return a plus code for the whole", "= TileSize.GLOBAL olcBuilder += tileAddress + PADDING_6 + SEPARATOR if", "are the same; false if not */''' if potentialSameTile.getTileSize() !=", "match\") numIterations = self.tile_size.getCodeLength()/2 #1..5 tileDistance = 0 for i", "True return False def contains(self, potentialMember): '''/** * Check if", "= '+' # Copy from OpenLocationCode.java # The character used", "= '0' PADDING_2 = \"00\" PADDING_4 = \"0000\" PADDING_6 =", "= { \"NW\": [+1, -1], \"N\": [+1, 0], \"NE\": [+1,", "south_set memoized_digit_dict[f\"W{iterations_needed}\"] = west_set all_border_set = north_set | east_set |", "array of the typically 8 neighboring tiles of the same", "if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\") numIterations", "self.tile_size = detectedTileSize self.code = olcBuilder.upper() def getWrappedOpenLocationCode(self): # this", "== '2'} WEST_DIGITS = {x for x in BASE_20_BORDER_SET if", "'''//avoid iterating over neighbors for same tile''' if self.isSameTile(potentialNeighbor): return", "For more information, see https://github.com/google/open-location-code * * @author <NAME> *", "= potentialNeighbor if biggerTile.contains(smallerTile): return False neighbors = smallerTile.getNeighbors() for", "len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles", "and used as such. 
* @param otherTile another tile of", "TileSize.NEIGHBORHOOD olcBuilder += tileAddress + SEPARATOR if len(tileAddress) == TileSize.PINPOINT.getCodeLength():", "> NUM_CHARACTERS_USED/2: if firstDiff > 0: firstDiff -= NUM_CHARACTERS_USED else:", "east_set memoized_digit_dict[f\"S{iterations_needed}\"] = south_set memoized_digit_dict[f\"W{iterations_needed}\"] = west_set all_border_set = north_set", "allowed by desired_tile_size\") iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set =", "in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]: ''' ajacent", "TileSize.PINPOINT else: raise Exception(\"Too precise, sort this later\") def constructTileFromCodeAndSize(self,", "direction + 'S' if self_tile_y != other_tile_y: ''' one tile", "addresses will be 4 characters long.''' REGION = (4, 1.0)", "c2 = otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2) if absolute_value_bool: return abs(tileDistance)", "if potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid iterating over neighbors for same", "* * @author <NAME> * @version 0.1.0 */ Ported by", "!= self.getTileSize(): raise Exception(\"Tile sizes don't match\") numIterations = self.tile_size.getCodeLength()/2", "thrown if tileAddress is of * invalid length */''' detectedTileSize", "to approximately 110km. Tile addresses will be 4 characters long.'''", "A tile address is a string of length 2, 4,", "full plus code for the whole tile. * @return a", "@return an angle in radians, 0 being an eastward direction,", "the same; false if not */''' # //if A contains", "{x+y for x in CODE_ALPHABET for y in CODE_ALPHABET} BASE_20_BORDER_SET", "= tile_size def constructTileFromLatLong(self, lat: float, long: float, tile_size=None): '''/**", "\"SE\", \"S\", \"SW\", \"W\"] direction_dict = { \"NW\": [+1, -1],", "An area of 0.05° x 0.05°. The side length of", "OpenLocationCode. 
* * Using this wrapper class allows to determine", "latitude of the location * @param longitude longitude of the", "\"E\", \"SE\", \"S\", \"SW\", \"W\"] direction_dict = { \"NW\": [+1,", "base in base_set} elif len(eight_point_direction) == 2: ''' NW, NE,", "} base = '' for i in range(iterations_needed): base +=", "#; //360°/20° = 18 if abs(firstDiff) > NUM_CHARACTERS_USED/2: if firstDiff", "= code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength() code = code_address + (\"0\"", "olc is not a full code */''' if not olc.isFull(plus_code):", "being westward direction * @throws IllegalArgumentException thrown if otherTile has", "of this address * @throws IllegalArgumentException passed through from *", "adjacent if at least one neighbor of the smaller tile,", "away tiles, so this should only be * interpreted as", "neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1] direction = \"\" north_south = None", "another one. * @param potentialMember the OpenGeoTile to check *", "''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_x) == 0: '''", "is effectively redundant as python has no wrapping '''/** *", "constructTileFromCode(self, plus_code): '''/** * Creates a new OpenGeoTile from an", "in set_of_addresses: if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum", "None: print(tileAddress) raise Exception(\"Invalid tile address\") self.tile_size = detectedTileSize self.code", "if not south_exists: south_set.add(south_base) if not west_exists: west_set.add(west_base) memoized_digit_dict[f\"E{iterations_needed}\"] =", "neighbors: if biggerTile.contains(neighbor): return True return False def contains(self, potentialMember):", "= self.code.replace(SEPARATOR, \"\")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code): '''/** * Creates", "in neighbors: if biggerTile.contains(neighbor): return True return False def contains(self,", "raise 
Exception(\"Invalid OpenGeoTile constructor arguments\") if lat and long: self.constructTileFromLatLong(lat,", "= direction + 'N' else: ''' other tile is below", "int(self.getLongitudinalTileDistance(otherTile, False)) yDiff = int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff, xDiff) def", "self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self, otherTile): '''/** * Returns the approximate", "{x for x in BASE_20_BORDER_SET if x[0] == 'X'} EAST_DIGITS", "direction = direction + 'E' else: ''' other tile is", "len(code_address))) if desired_tile_size == TileSize.PINPOINT: code = code[:-2] + SEPARATOR", "address in set_of_addresses for base in BASE_20_SET} class OpenGeoTile(): '''", "other tile relative to this. The return value can *", "and another tile of the same size. * @param otherTile", "in [type(None), list, str]: raise Exception(\"eight_point_direction must be of type", "a technology developed by Google and licensed under the Apache", "an eastward direction, +/- PI being westward direction * @throws", "== TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder += tileAddress + SEPARATOR", "= [-1, 0, +1, +1, +1, 0, -1, -1] if", "of the other tile relative to this. The return value", "direction of the other tile relative to this. The return", "same size. 
* @return an array of the typically 8", "than * {@link #getWrappedOpenLocationCode()}, this will return a full plus", "* @throws IllegalArgumentException thrown if otherTile has different {@link TileSize}", "latitude = code_area.latitudeCenter longitude = code_area.longitudeCenter '''directions_list included to keep", "we don't have to deal with invalid lat/long values directly'''", "self.getTileSize(): return False return potentialSameTile.getTileAddress() == self.getTileAddress() def isNeighbor(self, potentialNeighbor):", "''' return None elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too", "* case the resulting OpenGeoTile will have a larger TileSize.", "don't have to deal with invalid lat/long values directly''' neighborLatitude", "is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) != -1 def is_tile_address(plus_code): return plus_code.find(SEPARATOR) ==", "Exception(\"Only full OLC supported. Use recover().\") modified_plus_code = return_code_of_tile_size(plus_code, tile_size)", "thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize()", "for address in address_set} return tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None):", "potentialNeighbor): '''/** * Check if a tile is neighboring this", "elif code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT elif code_length ==", "desired_tile_size == TileSize.PINPOINT: code = code[:-2] + SEPARATOR + code[-2:]", "the first longitudinal value, we need to take care of", "tile. 
Other than * {@link #getWrappedOpenLocationCode()}, this will return a", "trying to pass a short (non-full) OLC, or if OLC", "B, then B's address has A's address as a prefix", "for the whole tile, probably padded with '0' characters */'''", "check * @return true if the area potentialMember falls within", "self.getTileSize(): raise Exception(\"Tile sizes don't match\") return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile,", "+= firstDiff else: tileDistance += self.characterDistance(c1, c2) if absolute_value_bool: return", "base in all_border_set} elif len(eight_point_direction) == 1: ''' North, South,", "east_exists: east_set.add(east_base) if not south_exists: south_set.add(south_base) if not west_exists: west_set.add(west_base)", "+ SEPARATOR if len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT olcBuilder", "lat=None, long=None, ): if not (code or (code and tile_size)", "tile at this location. * @return this tile's address with", "-= NUM_CHARACTERS_USED else: firstDiff += NUM_CHARACTERS_USED tileDistance += firstDiff else:", "def constructTileFromCodeAndSize(self, plus_code, tile_size): ''' Creates a new OpenGeoTile from", "existing {@link com.google.openlocationcode.OpenLocationCode}. @param olc OpenLocationCode for the current location", "short (non-full) OLC, or if OLC has too much padding", "of the next biggest tile at this location. 
* @return", "-1], \"E\": [ 0, +1], \"SW\": [-1, -1], \"S\": [-1,", "+ 1}\"): quickest_i = i break for i in range(quickest_i,", "False def contains(self, potentialMember): '''/** * Check if this tile", "+ 'W' else: if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): ''' other tile", "{OpenGeoTile(address + base) for base in base_set} elif len(eight_point_direction) ==", "value''' return self.code_length def getCoordinateIncrement(self): '''get 1th value''' return self.coordinate_increment", "= tile_size def constructTileFromTileAddress(self, tileAddress): '''/** * Creates a new", "direction, to assist in expanding tile areas ''' if not", "does not exist in alphabet\") return index def characterDistance(self, c1,", "case the resulting OpenGeoTile will have a larger TileSize. *", "0.05° x 0.05°. The side length of this tile varies", "potentialNeighbor the OpenGeoTile to check * @return true if this", "re class TileSize(Enum): ''' An area of 20° x 20°.", "0], \"SE\": [-1, +1], } #lat_diff = [+1, +1, +1,", "to use for this OpenGeoTile @throws IllegalArgumentException when trying to", "str): directions = [] if eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper()) else:", "= set() south_set = set() west_set = set() if isinstance(eight_point_direction,", "eight_point_direction is None: ''' all borders ''' ''' traveling salesman", "if not */''' if potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid iterating over", "8 neighbors for tiles near the poles. 
*/''' # deltas", "potentialMember the OpenGeoTile to check * @return true if the", "false if not */''' if potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid iterating", "+ code[-2:] else: code = code[:-2] + SEPARATOR return code", "not south_exists: south_base += \"2\" + relevant_digit if not west_exists:", "longitude longitude of the location * @param tile_size tile size", "\"\" base_tuple_list = re.findall('..', base) ''' north will be Xd", "@author <NAME> * @version 0.1.0 */ Ported by scoofy on", "precise, sort this later\") def constructTileFromCodeAndSize(self, plus_code, tile_size): ''' Creates", "are adjacent if at least one neighbor of the smaller", "neighboring this one. * @param potentialNeighbor the OpenGeoTile to check", "= code_address + (\"0\" * (full_length - len(code_address))) if desired_tile_size", "NUM_CHARACTERS_USED = 18 #; //360°/20° = 18 if abs(firstDiff) >", "for i in range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address +", "same size as this one * @return an angle in", "quickest_i = 0 for i in reversed(range(iterations_needed)): if memoized_digit_dict.get(f\"{eight_point_direction}{i +", "add tiles that are the same as this one due", "if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): raise Exception(\"OLC padding larger than allowed", "(code and tile_size) or (lat and long)): raise Exception(\"Invalid OpenGeoTile", "approximately 275m. Tile addresses will be 8 characters long.''' NEIGHBORHOOD", "directions_list = [\"NW\", \"N\", \"NE\", \"E\", \"SE\", \"S\", \"SW\", \"W\"]", "tile address of this OpenGeoTile; */''' return self.tile_address def getTileAddressPrefix(self):", "to this. 
The return value can * have a large", "if this and potentialNeighbor are adjacent (8-neighborhood); * false if", "elif isinstance(eight_point_direction, str): directions = [] if eight_point_direction.upper() in directions_list:", "* @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(double, double,", "with the final two characters removed. In case of a", "appropriate * number of '0' and '+' characters. Example: Address", "by scoofy on 08.31.21 ''' def __init__(self, code=None, tile_size=None, lat=None,", "this OpenGeoTile. * For the plus code of the whole", "return abs(tileDistance) return tileDistance def getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize()", "south_base += \"2\" + relevant_digit if not west_exists: west_base +=", "on the area identified by a prefix * of the", "in expanding tile areas ''' if not self.isNeighbor(neighborTile): raise Exception(\"neighborTile", "'' for i in range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address", "def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): ''' tile is", "and '+' characters. 
Example: Address \"CVXW\" corresponds to OLC \"CVXW0000+\"", "west_set = set() if isinstance(eight_point_direction, str): eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses", "x[0] == '2'} WEST_DIGITS = {x for x in BASE_20_BORDER_SET", "self.tile_size = TileSize.GLOBAL elif code_length == TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION", "code_address = code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength() code = code_address +", "set_of_addresses for base in BASE_20_SET} class OpenGeoTile(): ''' /** *", "NUM_CHARACTERS_USED tileDistance += firstDiff else: tileDistance += self.characterDistance(c1, c2) if", "is above the other ''' if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1]", "this should only be * interpreted as a very rough", "0, +1, +1, +1, 0, -1, -1] if not type(eight_point_direction)", "== self.getTileSize(): '''//avoid iterating over neighbors for same tile''' if", "return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self, otherTile): '''/** *", "this and another tile of the same size. * @param", "addresses will be 8 characters long.''' NEIGHBORHOOD = (8, 0.0025)", "Google and licensed under the Apache License 2.0. * For", "TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT olcBuilder += tileAddress + PADDING_2 +", "describes the same area as this one. * @param potentialSameTile", "existing * {@link com.google.openlocationcode.OpenLocationCode}. 
* @param olc OpenLocationCode for the", "PADDING_4 + SEPARATOR if len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT", "same size; * may return less than 8 neighbors for", "# Copy from OpenLocationCode.java # The character used to pad", "the bigger tile''' if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile = potentialNeighbor", "one tile''' ordinal_digit_dict = { 'NW': 'X2', 'NE': 'XX', 'SE':", "(10, 0.000125) def __init__(self, code_length, coordinate_increment): self.code_length = code_length self.coordinate_increment", "constructTileFromTileAddress(self, tileAddress): '''/** * Creates a new OpenGeoTile from a", "to approximately 275m. Tile addresses will be 8 characters long.'''", "or (code and tile_size) or (lat and long)): raise Exception(\"Invalid", "the same; false if not */''' if potentialSameTile.getTileSize() != self.getTileSize():", "= plus_code.upper() if is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER) else: code_length =", "padding larger than allowed by desired_tile_size\") code_address = code[:desired_tile_size.getCodeLength()] full_length", "address * @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(String)}", "if absolute_value_bool: return abs(tileDistance) return tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if", "match\") numIterations = self.tile_size.getCodeLength()/2 #; //1..5 tileDistance = 0 for", "def getWrappedOpenLocationCode(self): # this code is effectively redundant as python", "def getTileAddressPrefix(self): '''/** * The prefix of a tile address", "not west_exists: west_base += relevant_digit + \"2\" if not east_exists:", "is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER) else: code_length = min(len(plus_code)-1, 10) if", "# Copy from OpenLocationCode.java # A separator used to break", "self.constructTileFromTileAddress(code) 
else: self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR, \"\")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self,", "padded code, in which * case the resulting OpenGeoTile will", "* @param tileAddress a tile address is a [2/4/6/8/10]-character string", "by this OpenGeoTile */''' return self.code def returnCode(self): return self.code", "GLOBAL = (2, 20.0) ''' An area of 1° x", "[d.upper() for d in eight_point_direction] directions = [direction for direction", "value corresponding to the number of tiles of the given", "@param longitude longitude of the location * @param tile_size tile", "* A tile address is a string of length 2,", "do so''' firstDiff = self.characterDistance(c1, c2) NUM_CHARACTERS_USED = 18 #;", "match\") self_tile_x = self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2]", "existing_bases = memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\") next_set = {existing_base + base", "> 0: firstDiff -= NUM_CHARACTERS_USED else: firstDiff += NUM_CHARACTERS_USED tileDistance", "a tile address is a [2/4/6/8/10]-character string that corresponds to", "in BASE_20_BORDER_SET if x[1] == 'X'} SOUTH_DIGITS = {x for", "swaping digits ''' all_border_set = memoized_digit_dict.get(f\"A{iterations_needed}\") if not all_border_set: north_base_set", "plus_code.find(PADDING_CHARACTER) != -1 def is_tile_address(plus_code): return plus_code.find(SEPARATOR) == -1 def", "to a * valid {@link com.google.openlocationcode.OpenLocationCode} after removing * '+'", "{@link OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress is of * invalid", "False)) yDiff = int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self,", "to a valid * {@link com.google.openlocationcode.OpenLocationCode} after padding with an", "to deal with invalid lat/long values directly''' neighborLatitude = 
latitude", "!= other_tile_y: ''' one tile is above the other '''", "!= self.getTileSize(): raise Exception(\"Tile sizes don't match\") xDiff = int(self.getLongitudinalTileDistance(otherTile,", "= potentialNeighbor biggerTile = self else: smallerTile = self biggerTile", "tile is above the other ''' if CODE_ALPHABET.find(self_tile_x) in [0,", "= self.tile_size.getCodeLength()/2 #; //1..5 tileDistance = 0 for i in", "i in reversed(range(iterations_needed)): if memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\"): quickest_i = i", "Calculates the Chebyshev (chessboard) distance between this and another tile", "| east_set | south_set | west_set memoized_digit_dict[f\"A{iterations_needed}\"] = all_border_set return", "a padded code, in which * case the resulting OpenGeoTile", "a new OpenGeoTile from a tile address. * @param tileAddress", "[0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent", "* be traversed getting from one to the other tile", "base = '' for i in range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction)", "if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size", "if a tile describes the same area as this one.", "{@link com.google.openlocationcode.OpenLocationCode}. 
@param olc OpenLocationCode for the current location @param", "\"\")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code): '''/** * Creates a new", "use for this OpenGeoTile @throws IllegalArgumentException when trying to pass", "north_south = None if self_tile_x != other_tile_x: ''' one tile", "range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address + base)} def memoizeDigitDict(self,", "self.tile_address def getTileAddressPrefix(self): '''/** * The prefix of a tile", "not tile_size: tile_size = TileSize.PINPOINT self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper()", "= detectedTileSize self.code = olcBuilder.upper() def getWrappedOpenLocationCode(self): # this code", "CODE_ALPHABET.find(other_tile_x) == 0: ''' other tile is above -> neighborTile", "and try to reduce by swaping digits ''' all_border_set =", "of trailing characters; tile size is * determined by the", "= {x for x in BASE_20_BORDER_SET if x[0] == '2'}", "of a given one, to calculate a distance in tiles", "getTileAddress(self): '''/** * A tile address is a string of", "of this OpenGeoTile; */''' return self.tile_address def getTileAddressPrefix(self): '''/** *", "keeps directions in the order above ''' uppercase_input_directions = [d.upper()", "+= relevant_digit + \"2\" if not east_exists: east_set.add(east_base) if not", "+= NUM_CHARACTERS_USED tileDistance += firstDiff else: tileDistance += self.characterDistance(c1, c2)", "'N' else: direction = direction + 'S' else: if CODE_ALPHABET.find(self_tile_x)", "than allowed by desired_tile_size\") iterations_needed = int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set", "wrapper class allows to determine whether two locations are in", "OpenGeoTile */''' return self.code def returnCode(self): return self.code def getTileSize(self):", "'''/** * Calculates the Manhattan (city block) distance between this", "getNeighbors(self, 
eight_point_direction=None): '''/** * Get an array of the typically", "of the location * @param longitude longitude of the location", "direction + 'S' else: if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): ''' other", "if eight_point_direction is None: directions = directions_list elif isinstance(eight_point_direction, str):", "tile is below -> neighborTile is south ''' direction =", "1: ''' North, South, East, or West ''' base_set =", "(8, 0.0025) ''' An area of 0.000125° x 0.000125°. The", "in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2 + 1]", "''' NW, NE, SW, SE... should return only one tile'''", "\"SE\": [-1, +1], } #lat_diff = [+1, +1, +1, 0,", "if this tile contains another one. * @param potentialMember the", "around, do so''' firstDiff = self.characterDistance(c1, c2) NUM_CHARACTERS_USED = 18", "next biggest tile at this location. * @return this tile's", "+1], \"SW\": [-1, -1], \"S\": [-1, 0], \"SE\": [-1, +1],", "if tile sizes and addresses are the same; false if", "*/''' detectedTileSize = None olcBuilder = \"\" if len(tileAddress) ==", "eight_point_direction] directions = [direction for direction in directions_list if direction", "memoized_digit_dict[f\"A{iterations_needed}\"] = all_border_set return {OpenGeoTile(address+base) for base in all_border_set} elif", "problem ''' ''' let's do it once, and try to", "0.000125°. The side length of this tile varies with its", "not type(eight_point_direction) in [type(None), list, str]: raise Exception(\"eight_point_direction must be", "tile of the same size. 
* @param otherTile another tile", "''' other tile is left -> neighborTile is west '''", "check * @return true if this and potentialNeighbor are adjacent", "code_length = min(len(plus_code)-1, 10) if code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size =", "set() east_set = set() south_set = set() west_set = set()", "through from * {@link OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress is", "directly''' neighborLatitude = latitude + (delta * lat_diff) neighborLongitude =", "break the code into two parts to aid memorability. SEPARATOR", "BASE_20_BORDER_SET if x[0] == 'X'} EAST_DIGITS = {x for x", "Exception(\"OLC padding larger than allowed by desired_tile_size\") code_address = code[:desired_tile_size.getCodeLength()]", "in ['2', 'X'] or x[1] in ['2', 'X']} NORTH_DIGITS =", "(4, 1.0) ''' An area of 0.05° x 0.05°. The", "size ''' return None return {address+base for address in set_of_addresses", "if detectedTileSize == None: print(tileAddress) raise Exception(\"Invalid tile address\") self.tile_size", "list construction keeps directions in the order above ''' uppercase_input_directions", "corresponds to a * valid {@link com.google.openlocationcode.OpenLocationCode} after removing *", "== self.getTileAddress() def isNeighbor(self, potentialNeighbor): '''/** * Check if a", "BASE_20_BORDER_SET if x[1] == 'X'} SOUTH_DIGITS = {x for x", "direction in directions: lat_diff, long_diff = direction_dict.get(direction) ''' //OLC constructor", "0.05) ''' An area of 0.0025° x 0.0025°. 
The side", "x in BASE_20_BORDER_SET if x[1] == 'X'} SOUTH_DIGITS = {x", "Exception(\"eight_point_direction must be of type list or str\") if eight_point_direction", "not */''' # //if A contains B, then B's address", "6 characters long.''' DISTRICT = (6, 0.05) ''' An area", "self.getTileAddress() if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible", "west_set all_border_set = north_set | east_set | south_set | west_set", "{ \"NW\": [+1, -1], \"N\": [+1, 0], \"NE\": [+1, +1],", "PADDING_6 + SEPARATOR if len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION", "wrapping '''/** * The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by this", "constructor clips and normalizes, //so we don't have to deal", "let's do it once, and try to reduce by swaping", "tile_size): ''' Creates a new OpenGeoTile from an existing {@link", "globe, but can be up to approximately 2200km. Tile addresses", "if index == -1: raise Exception(\"Character does not exist in", "OpenGeoTile; */''' return self.tile_address def getTileAddressPrefix(self): '''/** * The prefix", "x in CODE_ALPHABET for y in CODE_ALPHABET} BASE_20_BORDER_SET = {x", "\"SW\": [-1, -1], \"S\": [-1, 0], \"SE\": [-1, +1], }", "globe, but can be up to approximately 275m. Tile addresses", "characters; tile size is * determined by the length of", "''' direction = direction + 'S' if self_tile_y != other_tile_y:", "getCharacterIndex(self, c): '''//following definitions copied from OpenLocationCode.java''' index = \"23456789CFGHJMPQRVWX\".find(c.upper())", "'''/** * Calculates the Chebyshev (chessboard) distance between this and", "if at least one neighbor of the smaller tile, //but", "area as this one. 
* @param potentialSameTile the OpenGeoTile to", "''' direction = direction + 'N' else: ''' other tile", "TileSize.REGION olcBuilder += tileAddress + PADDING_4 + SEPARATOR if len(tileAddress)", "self.tile_size.getCodeLength()/2 #1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance", "the globe, but can be up to approximately 14m. Tile", "tile address\") self.tile_size = detectedTileSize self.code = olcBuilder.upper() def getWrappedOpenLocationCode(self):", "@param potentialSameTile the OpenGeoTile to check * @return true if", "{@link com.google.openlocationcode.OpenLocationCode} after padding with an appropriate * number of", "if not self.isNeighbor(neighborTile): raise Exception(\"neighborTile must be neighbor\") if neighborTile.getTileSize()", "absolute_value_bool: return abs(tileDistance) return tileDistance def getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if", "tile_size tile size to use for this OpenGeoTile @throws IllegalArgumentException", "self.getTileSize(): '''//avoid iterating over neighbors for same tile''' if self.isSameTile(potentialNeighbor):", "one * @return an integer value corresponding to the number", "* @param tile_size tile size to use for this OpenGeoTile", "@return an integer value corresponding to the number of tiles", "if CODE_ALPHABET.find(other_tile_y) == 0: ''' other tile is right ->", "from an existing * {@link com.google.openlocationcode.OpenLocationCode}. 
* @param olc OpenLocationCode", "sizes don't match\") xDiff = int(self.getLongitudinalTileDistance(otherTile, False)) yDiff = int(self.getLatitudinalTileDistance(otherTile,", "tile size to use for this OpenGeoTile @throws IllegalArgumentException when", "return_code_of_tile_size(plus_code, tile_size) self.code = modified_plus_code.upper() self.tile_size = tile_size def constructTileFromLatLong(self,", "not north_base_set: self.memoizeDigitDict(\"N\", iterations_needed) north_set = memoized_digit_dict.get(f\"N{iterations_needed}\") east_set = memoized_digit_dict.get(f\"E{iterations_needed}\",", "return code def return_set_of_subaddresses(set_of_addresses): for address in set_of_addresses: if len(address)", "neighboring tiles of a given one, to calculate a distance", "'S' else: if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): ''' other tile is", "else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return self.getTileAddressPrefix() def getTileOpenLocationCode(self):", "next_set = {existing_base + base for existing_base in existing_bases for", "range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2]", "of the same size as this one * @return an", "'''/** * Get the {@link TileSize} of this OpenGeoTile. *", "self.isSameTile(new_OpenGeoTile): '''//don't add tiles that are the same as this", "{OpenGeoTile(address + base)} def memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\")", "not */''' if potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid iterating over neighbors", "south_base = \"\" west_base = \"\" base_tuple_list = re.findall('..', base)", "for tiles near the poles. 
*/''' # deltas = [20.0,", "only one tile''' ordinal_digit_dict = { 'NW': 'X2', 'NE': 'XX',", "A separator used to break the code into two parts", "\"\" else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return self.getTileAddressPrefix() def", "plus_code): '''/** * Creates a new OpenGeoTile from an existing", "due to clipping near the poles''' neighbors.add(new_OpenGeoTile) return neighbors def", "neighbor in neighbors: if potentialNeighbor.isSameTile(neighbor): return True return False else:", "one. * @param potentialNeighbor the OpenGeoTile to check * @return", "''' if CODE_ALPHABET.find(other_tile_y) == 0: ''' other tile is right", "code of the whole tile, see {@link #getTileOpenLocationCode()}. * @return", "another tile of the same size as this one *", "xDiff = int(self.getLongitudinalTileDistance(otherTile, False)) yDiff = int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff,", "this tile contains another one. * @param potentialMember the OpenGeoTile", "= TileSize.PINPOINT.getCodeLength() code = code_address + (\"0\" * (full_length -", "\"2\" if not east_exists: east_set.add(east_base) if not south_exists: south_set.add(south_base) if", "TileSize.PINPOINT: code = code[:-2] + SEPARATOR + code[-2:] else: code", "BASE_20_SET = {x+y for x in CODE_ALPHABET for y in", "be up to approximately 14m. Tile addresses will be 10", "of the same size. 
* @param otherTile another tile of", "{@link TileSize} of this OpenGeoTile */''' return self.tile_size def getTileAddress(self):", "self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2) if absolute_value_bool: return", "@throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(double, double, int)}", "shorter to go the other way around, do so''' firstDiff", "biggerTile.contains(neighbor): return True return False def contains(self, potentialMember): '''/** *", "OpenGeoTile. * For the plus code of the whole tile,", "passed through from * {@link OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress", "desired size ''' return self elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size", "+= relevant_digit + \"X\" if not south_exists: south_base += \"2\"", "else: '''//tiles of different size are adjacent if at least", "getParentTileAddress(self): return self.getTileAddressPrefix() def getTileOpenLocationCode(self): # this code is redundant", "* @author <NAME> * @version 0.1.0 */ Ported by scoofy", "if not self.isSameTile(new_OpenGeoTile): '''//don't add tiles that are the same", "TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD elif code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size =", "The full {@link com.google.openlocationcode.OpenLocationCode} for this tile. Other than *", "of this OpenGeoTile */''' return self.tile_size def getTileAddress(self): '''/** *", "): if not (code or (code and tile_size) or (lat", "= int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile): '''", "left -> neighborTile is west ''' direction = direction +", "the empty string. 
*/''' if self.tile_size == TileSize.GLOBAL: return \"\"", "index = \"23456789CFGHJMPQRVWX\".find(c.upper()) if index == -1: raise Exception(\"Character does", "+ \"2\" if not east_exists: east_set.add(east_base) if not south_exists: south_set.add(south_base)", "self.tile_size def getTileAddress(self): '''/** * A tile address is a", "north_set | east_set | south_set | west_set memoized_digit_dict[f\"A{iterations_needed}\"] = all_border_set", "and licensed under the Apache License 2.0. * For more", "x[1] == '2'} memoized_digit_dict = { \"N1\": NORTH_DIGITS, \"E1\": EAST_DIGITS,", "not all_border_set: north_base_set = memoized_digit_dict.get(f\"N{iterations_needed}\") if not north_base_set: self.memoizeDigitDict(\"N\", iterations_needed)", "[0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent", "return_set_of_subaddresses(set_of_addresses): for address in set_of_addresses: if len(address) == TileSize.PINPOINT.getCodeLength(): '''", "in all_border_set} elif len(eight_point_direction) == 1: ''' North, South, East,", "larger TileSize. * @throws IllegalArgumentException if olc is not a", "Copy from OpenLocationCode.java # A separator used to break the", "directions = directions_list elif isinstance(eight_point_direction, str): directions = [] if", "within the area of this tile, including cases * where", "''' direction = direction + 'W' else: if CODE_ALPHABET.find(self_tile_y) <", "base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set =", "* Creates a new OpenGeoTile from lat/long coordinates. 
* @param", "olcBuilder.upper() def getWrappedOpenLocationCode(self): # this code is effectively redundant as", "code for the whole tile, probably padded with '0' characters", "and an additional number of trailing characters; tile size is", "a new OpenGeoTile from an existing {@link com.google.openlocationcode.OpenLocationCode}. @param olc", "raise Exception(\"Tile sizes don't match\") return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile,", "Using this wrapper class allows to determine whether two locations", "== 1: ''' North, South, East, or West ''' base_set", "* @return an array of the typically 8 neighboring tiles", "= TileSize.PINPOINT self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size = tile_size", "if not west_exists: west_base += relevant_digit + \"2\" if not", "tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif code: if is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code)", "be of type list or str\") if eight_point_direction is None:", "= south_set != set() west_exists = west_set != set() for", "len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder += tileAddress +", "in range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address + base)} def", "-> neighborTile is south ''' direction = direction + 'S'", "self.getTileSize(): raise Exception(\"Tile sizes don't match\") numIterations = self.tile_size.getCodeLength()/2 #1..5", "all_border_set: north_base_set = memoized_digit_dict.get(f\"N{iterations_needed}\") if not north_base_set: self.memoizeDigitDict(\"N\", iterations_needed) north_set", "addresses will be 2 characters long.''' GLOBAL = (2, 20.0)", "True)) def getDirection(self, otherTile): '''/** * Returns the approximate direction", "should only be * interpreted as a very rough approximation", "to clipping 
near the poles''' neighbors.add(new_OpenGeoTile) return neighbors def isSameTile(self,", "\"0000\" PADDING_6 = \"000000\" CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET = {x+y", "new OpenGeoTile from an existing * {@link com.google.openlocationcode.OpenLocationCode}. * @param", "base_set} elif len(eight_point_direction) == 2: ''' NW, NE, SW, SE...", "OLC, or if OLC has too much padding for given", "getCoordinateIncrement(self): '''get 1th value''' return self.coordinate_increment # Copy from OpenLocationCode.java", "self.getTileSize(): raise Exception(\"Tile sizes don't match\") xDiff = int(self.getLongitudinalTileDistance(otherTile, False))", "= longitude + (delta * long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude,", "the other ''' if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y)", "neighbor of the smaller tile, //but not the smaller tile", "if CODE_ALPHABET.find(other_tile_x) == 0: ''' other tile is above ->", "\"N1\": NORTH_DIGITS, \"E1\": EAST_DIGITS, \"S1\": SOUTH_DIGITS, \"W1\": WEST_DIGITS, } def", "x[0] == 'X'} EAST_DIGITS = {x for x in BASE_20_BORDER_SET", "full code */''' if not olc.isFull(plus_code): raise Exception(\"Only full OLC", "'''/** * Creates a new OpenGeoTile from a tile address.", "this one * @return an angle in radians, 0 being", "this OpenGeoTile * @throws IllegalArgumentException passed through from * {@link", "(8-neighborhood); * false if not */''' if potentialNeighbor.getTileSize() == self.getTileSize():", "@return true if tile sizes and addresses are the same;", "* The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile. *", "than 8 neighbors for tiles near the poles. 
*/''' #", "don't match\") numIterations = self.tile_size.getCodeLength()/2 #; //1..5 tileDistance = 0", "* @throws IllegalArgumentException if olc is not a full code", "Calculates the Manhattan (city block) distance between this and another", "but can be up to approximately 14m. Tile addresses will", "= TileSize.REGION elif code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT elif", "if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]:", "+ SEPARATOR if len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT olcBuilder", "the final two characters removed. In case of a GLOBAL", "adjacent (8-neighborhood); * false if not */''' if potentialNeighbor.getTileSize() ==", "else: self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR, \"\")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code):", "to check * @return true if this and potentialNeighbor are", "a large margin of error, especially for big or far", "are in the same or adjacent * \"tiles\", to determine", "etc. * * Open Location Code is a technology developed", "@param olc OpenLocationCode for the current location. This can be", "if self_tile_y != other_tile_y: ''' one tile is above the", "break for i in range(quickest_i, iterations_needed): existing_bases = memoized_digit_dict.get(f\"{eight_point_direction}{i +", "> desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception(\"OLC padding larger", "#getTileOpenLocationCode()}. 
* @return the exact plus code wrapped by this", "directions.append(eight_point_direction.upper()) else: ''' this list construction keeps directions in the", "far away tiles, so this should only be * interpreted", "self.code = plus_code.upper() if is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER) else: code_length", "i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2 +", "{@link com.google.openlocationcode.OpenLocationCode}. * @param olc OpenLocationCode for the current location.", "* @param potentialMember the OpenGeoTile to check * @return true", "def constructTileFromCode(self, plus_code): '''/** * Creates a new OpenGeoTile from", "more information, see https://github.com/google/open-location-code * * @author <NAME> * @version", "be 6 characters long.''' DISTRICT = (6, 0.05) ''' An", "the Apache License 2.0. * For more information, see https://github.com/google/open-location-code", "tile_size tile size to use for this OpenGeoTile * @throws", "== 0: ''' other tile is right -> neighborTile is", "''' direction = direction + 'W' return direction def getCharacterIndex(self,", "@param potentialNeighbor the OpenGeoTile to check * @return true if", "used to pad codes. PADDING_CHARACTER = '0' PADDING_2 = \"00\"", "int)} */''' if not tile_size: tile_size = TileSize.PINPOINT self.code =", "valid * {@link com.google.openlocationcode.OpenLocationCode} after padding with an appropriate *", "//if it's shorter to go the other way around, do", "getCodeLength(self): '''get 0th value''' return self.code_length def getCoordinateIncrement(self): '''get 1th", "+1, +1, 0, -1, -1, -1, 0] #long_diff = [-1,", "= eight_point_direction.upper() set_of_border_subaddresses = set() if eight_point_direction is None: '''", "\"E1\": EAST_DIGITS, \"S1\": SOUTH_DIGITS, \"W1\": WEST_DIGITS, } def is_padded(plus_code): return", "up to approximately 14m. 
Tile addresses will be 10 characters", "PADDING_6 = \"000000\" CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET = {x+y for", "self.tile_size = tile_size def constructTileFromTileAddress(self, tileAddress): '''/** * Creates a", "return tileDistance def getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize():", "be up to approximately 110km. Tile addresses will be 4", "''' return None return {address+base for address in set_of_addresses for", "'X'] or x[1] in ['2', 'X']} NORTH_DIGITS = {x for", "-1, -1] if not type(eight_point_direction) in [type(None), list, str]: raise", "scoofy on 08.31.21 ''' def __init__(self, code=None, tile_size=None, lat=None, long=None,", "= {x for x in BASE_20_SET if x[0] in ['2',", "''' Creates a new OpenGeoTile from an existing {@link com.google.openlocationcode.OpenLocationCode}.", "base += ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address + base)} def memoizeDigitDict(self, eight_point_direction,", "of the location * @param tile_size tile size to use", "base_tuple_list = re.findall('..', base) ''' north will be Xd east", "@throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */'''", "getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns neighbor's direction, to assist in expanding", "''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_y) == 0: '''", "raise Exception(\"OLC padding larger than allowed by desired_tile_size\") code_address =", "[-1, 0], \"SE\": [-1, +1], } #lat_diff = [+1, +1,", "try to reduce by swaping digits ''' all_border_set = memoized_digit_dict.get(f\"A{iterations_needed}\")", "+= \"2\" + relevant_digit if not west_exists: west_base += relevant_digit", "= i break for i in range(quickest_i, iterations_needed): existing_bases =", "the other ''' if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x)", "will be 2 characters long.''' GLOBAL = 
(2, 20.0) '''", "can be up to approximately 2200km. Tile addresses will be", "long=neighborLongitude, tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile): '''//don't add tiles that are", "OLC \"CVXW0000+\" * @return the tile address of this OpenGeoTile;", "-1] if not type(eight_point_direction) in [type(None), list, str]: raise Exception(\"eight_point_direction", "= \"\" west_base = \"\" base_tuple_list = re.findall('..', base) '''", "= {x for x in BASE_20_BORDER_SET if x[0] == 'X'}", "potentialSameTile.getTileAddress() == self.getTileAddress() def isNeighbor(self, potentialNeighbor): '''/** * Check if", "neighbors def isSameTile(self, potentialSameTile): '''/** * Check if a tile", "is * determined by the length of this address *", "return neighbors def isSameTile(self, potentialSameTile): '''/** * Check if a", "code_area.latitudeCenter longitude = code_area.longitudeCenter '''directions_list included to keep ordered data'''", "number of tiles of the given size that need to", "* Get an array of the typically 8 neighboring tiles", "of '0' and '+' characters. Example: Address \"CVXW\" corresponds to", "else: code_length = min(len(plus_code)-1, 10) if code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size", "set()) east_exists = east_set != set() south_exists = south_set !=", "abs(tileDistance) return tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength():", "//1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance *=", "the OpenGeoTile to check * @return true if this and", "str): eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses = set() if eight_point_direction is", "an {@code OpenLocationCode} object, focusing on the area identified by", "coordinates. 
* @param latitude latitude of the location * @param", "* determined by the length of this address * @throws", "potentialSameTile.getTileSize() != self.getTileSize(): return False return potentialSameTile.getTileAddress() == self.getTileAddress() def", "coordinate_increment def getCodeLength(self): '''get 0th value''' return self.code_length def getCoordinateIncrement(self):", "address as a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile): '''/**", "between this and another tile of the same size. *", "to * be traversed getting from one to the other", "neighboring tiles of the same size. * @return an array", "if len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT olcBuilder += tileAddress[0:8]", "resulting OpenGeoTile will have a larger TileSize. * @throws IllegalArgumentException", "IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if", "TileSize.DISTRICT elif code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD elif code_length", "c2 = otherTile.getTileAddress()[i*2 + 1] if i == 0: '''//for", "valid {@link com.google.openlocationcode.OpenLocationCode} after removing * '+' and an additional", "[type(None), list, str]: raise Exception(\"eight_point_direction must be of type list", "def __init__(self, code_length, coordinate_increment): self.code_length = code_length self.coordinate_increment = coordinate_increment", "if not is_tile_address(code): code = code.replace(SEPARATOR, '') if is_padded(code): if", "= TileSize.DISTRICT elif code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD elif", "return potentialSameTile.getTileAddress() == self.getTileAddress() def isNeighbor(self, potentialNeighbor): '''/** * Check", "other_tile_x: ''' one tile is above the other ''' if", "it's shorter to go the other way around, do so'''", "-1: raise 
Exception(\"Character does not exist in alphabet\") return index", "can be a padded code, in which * case the", "value can * have a large margin of error, especially", "(2, 20.0) ''' An area of 1° x 1°. The", "larger than allowed by desired_tile_size\") code_address = code[:desired_tile_size.getCodeLength()] full_length =", "def isSameTile(self, potentialSameTile): '''/** * Check if a tile describes", "TileSize.PINPOINT.getCodeLength() code = code_address + (\"0\" * (full_length - len(code_address)))", "sizes don't match\") return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True) def", "[-1, 0, +1, +1, +1, 0, -1, -1] if not", "= self.tile_size.getCodeLength()/2 #1..5 tileDistance = 0 for i in range(int(numIterations)):", "but can be up to approximately 2200km. Tile addresses will", "the whole tile. * @return a plus code for the", "0], \"NE\": [+1, +1], \"W\": [ 0, -1], \"E\": [", "@return an array of the typically 8 neighboring tiles of", "self else: smallerTile = self biggerTile = potentialNeighbor if biggerTile.contains(smallerTile):", "much padding for given tile_size ''' if not olc.isFull(plus_code): raise", "c1 = self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2) if", "size ''' return None elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is", "to calculate a distance in tiles etc. 
* * Open", "*/''' return self.tile_size def getTileAddress(self): '''/** * A tile address", "over neighbors for same tile''' if self.isSameTile(potentialNeighbor): return False neighbors", "itself, is contained within the bigger tile''' if potentialNeighbor.getTileSize().getCodeLength() >", "def getParentTileAddress(self): return self.getTileAddressPrefix() def getTileOpenLocationCode(self): # this code is", "biggerTile = self else: smallerTile = self biggerTile = potentialNeighbor", "all borders ''' ''' traveling salesman problem ''' ''' let's", "<NAME> * @version 0.1.0 */ Ported by scoofy on 08.31.21", "+ PADDING_2 + SEPARATOR if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize =", "tile_size = TileSize.PINPOINT self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size =", "@param latitude latitude of the location * @param longitude longitude", "SEPARATOR + tileAddress[8:10] if detectedTileSize == None: print(tileAddress) raise Exception(\"Invalid", "= n_tuple[1] if not east_exists: east_base += relevant_digit + \"X\"", "be 2 characters long.''' GLOBAL = (2, 20.0) ''' An", "if olc is not a full code */''' if not", "-1, -1, -1, 0] #long_diff = [-1, 0, +1, +1,", "the same size. * @return an array of the typically", "OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile): '''//don't add tiles that", "def constructTileFromLatLong(self, lat: float, long: float, tile_size=None): '''/** * Creates", "TileSize} of this OpenGeoTile */''' return self.tile_size def getTileAddress(self): '''/**", "determine all neighboring tiles of a given one, to calculate", "in CODE_ALPHABET} BASE_20_BORDER_SET = {x for x in BASE_20_SET if", "tile_size) elif code: if is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code) self.tile_address =", "* Get the {@link TileSize} of this OpenGeoTile. 
* @return", "code_length == TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION elif code_length == TileSize.DISTRICT.getCodeLength():", "keep ordered data''' directions_list = [\"NW\", \"N\", \"NE\", \"E\", \"SE\",", "it once, and try to reduce by swaping digits '''", "= memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}')", "potentialNeighbor.isSameTile(neighbor): return True return False else: '''//tiles of different size", "direction + 'W' return direction def getCharacterIndex(self, c): '''//following definitions", "as such. * @param otherTile another tile of the same", "padding larger than allowed by desired_tile_size\") iterations_needed = int(desired_tile_size.getCodeLength()/2 -", "is_tile_address(plus_code): return plus_code.find(SEPARATOR) == -1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code =", "full OLC supported. 
Use recover().\") modified_plus_code = return_code_of_tile_size(plus_code, tile_size) self.code", "else: code = code[:-2] + SEPARATOR return code def return_set_of_subaddresses(set_of_addresses):", "elif code and tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif code: if is_tile_address(code):", "east_set.add(east_base) if not south_exists: south_set.add(south_base) if not west_exists: west_set.add(west_base) memoized_digit_dict[f\"E{iterations_needed}\"]", "= memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\") next_set = {existing_base + base for", "!= self.getTileSize(): raise Exception(\"Tile sizes don't match\") return self.getLatitudinalTileDistance(otherTile, True)", "firstDiff > 0: firstDiff -= NUM_CHARACTERS_USED else: firstDiff += NUM_CHARACTERS_USED", "= olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size = tile_size def constructTileFromTileAddress(self, tileAddress):", "code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): raise Exception(\"OLC padding larger than allowed by", "def getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile", "probably padded with '0' characters */''' return self.getWrappedOpenLocationCode() def getNeighbors(self,", "be Xd east dX south 2d west d2''' for n_tuple", "@throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(String)} or thrown", "\"N\": [+1, 0], \"NE\": [+1, +1], \"W\": [ 0, -1],", "* {@link OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress is of *", "is desired size ''' return self elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength():", "which * case the resulting OpenGeoTile will have a larger", "<gh_stars>0 from openlocationcode import openlocationcode as olc from enum import", "= code.replace(SEPARATOR, '') if is_padded(code): if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): 
raise", "minimum possible size ''' return None return {address+base for address", "the length of this address * @throws IllegalArgumentException passed through", "@param tile_size tile size to use for this OpenGeoTile *", "list or str\") if eight_point_direction is None: directions = directions_list", "tiles of the same size. * @return an array of", "will be Xd east dX south 2d west d2''' for", "''' if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0,", "this wrapper class allows to determine whether two locations are", "in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_y)", "''' returns neighbor's direction, to assist in expanding tile areas", "TileSize.NEIGHBORHOOD elif code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT else: raise", "size as this one * @return an integer value corresponding", "tileAddress[0:8] + SEPARATOR + tileAddress[8:10] if detectedTileSize == None: print(tileAddress)", "iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address + base) for base", "tile relative to this. The return value can * have", "c2): return self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if", "SOUTH_DIGITS, \"W1\": WEST_DIGITS, } def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) != -1", "not base_set: quickest_i = 0 for i in reversed(range(iterations_needed)): if", "full_length = TileSize.PINPOINT.getCodeLength() code = code_address + (\"0\" * (full_length", "tileAddress): '''/** * Creates a new OpenGeoTile from a tile", "if a tile is neighboring this one. * @param potentialNeighbor", "tile itself, is contained within the bigger tile''' if potentialNeighbor.getTileSize().getCodeLength()", "275m. 
Tile addresses will be 8 characters long.''' NEIGHBORHOOD =", "otherTile): '''/** * Returns the approximate direction of the other", "if desired_tile_size == TileSize.PINPOINT: code = code[:-2] + SEPARATOR +", "BASE_20_BORDER_SET = {x for x in BASE_20_SET if x[0] in", "'') if is_padded(code): if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): raise Exception(\"OLC padding", "in radians, 0 being an eastward direction, +/- PI being", "'N' else: ''' other tile is below -> neighborTile is", "a valid * {@link com.google.openlocationcode.OpenLocationCode} after padding with an appropriate", "tileAddress[8:10] if detectedTileSize == None: print(tileAddress) raise Exception(\"Invalid tile address\")", "set_of_border_subaddresses = set() if eight_point_direction is None: ''' all borders", "not is_tile_address(code): code = code.replace(SEPARATOR, '') if is_padded(code): if code.find(PADDING_CHARACTER)", "* Creates a new OpenGeoTile from a tile address. *", "if x[0] == '2'} WEST_DIGITS = {x for x in", "numIterations = self.tile_size.getCodeLength()/2 #1..5 tileDistance = 0 for i in", "South, East, or West ''' base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not", "the other tile relative to this. 
The return value can", "plus_code.find(SEPARATOR) == -1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code = too_precise_plus_code if", "east_base = \"\" south_base = \"\" west_base = \"\" base_tuple_list", "self.tile_address = self.code.replace(SEPARATOR, \"\")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code): '''/** *", "contained within the bigger tile''' if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile", "detectedTileSize = TileSize.GLOBAL olcBuilder += tileAddress + PADDING_6 + SEPARATOR", "to the number of tiles of the given size that", "west_exists: west_base += relevant_digit + \"2\" if not east_exists: east_set.add(east_base)", "integer value corresponding to the number of tiles of the", "= self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y =", "an additional number of trailing characters; tile size is *", "by a prefix * of the given OpenLocationCode. * *", "[0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_x) ==", "the approximate direction of the other tile relative to this.", "TileSize. * @throws IllegalArgumentException if olc is not a full", "the poles. */''' # deltas = [20.0, 1.0, 0.05, 0.0025,", "length */''' detectedTileSize = None olcBuilder = \"\" if len(tileAddress)", "base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address + base) for base in", "Creates a new OpenGeoTile from an existing {@link com.google.openlocationcode.OpenLocationCode}. 
@param", "= TileSize.REGION olcBuilder += tileAddress + PADDING_4 + SEPARATOR if", "OpenGeoTile to check * @return true if this and potentialNeighbor", "be 8 characters long.''' NEIGHBORHOOD = (8, 0.0025) ''' An", "code = too_precise_plus_code if not is_tile_address(code): code = code.replace(SEPARATOR, '')", "''' one tile is above the other ''' if CODE_ALPHABET.find(self_tile_x)", "from OpenLocationCode.java''' index = \"23456789CFGHJMPQRVWX\".find(c.upper()) if index == -1: raise", "+1], \"W\": [ 0, -1], \"E\": [ 0, +1], \"SW\":", "this OpenGeoTile; */''' return self.tile_address def getTileAddressPrefix(self): '''/** * The", "== TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION elif code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size", "x 1°. The side length of this tile varies with", "size to use for this OpenGeoTile * @throws IllegalArgumentException passed", "west_exists = west_set != set() for base in north_set: east_base", "code def return_set_of_subaddresses(set_of_addresses): for address in set_of_addresses: if len(address) ==", "= return_code_of_tile_size(plus_code, tile_size) self.code = modified_plus_code.upper() self.tile_size = tile_size def", "self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self, otherTile): '''/** *", "0.0025) ''' An area of 0.000125° x 0.000125°. The side", "= olc.CODE_ALPHABET_ BASE_20_SET = {x+y for x in CODE_ALPHABET for", "tiles ''' if CODE_ALPHABET.find(other_tile_x) == 0: ''' other tile is", "A wrapper around an {@code OpenLocationCode} object, focusing on the", "''' traveling salesman problem ''' ''' let's do it once,", "''' uppercase_input_directions = [d.upper() for d in eight_point_direction] directions =", "tileAddress a tile address is a [2/4/6/8/10]-character string that corresponds", "a tile address. 
* @param tileAddress a tile address is", "this one * @return an integer value corresponding to the", "== -1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code = too_precise_plus_code if not", "\"X\" if not south_exists: south_base += \"2\" + relevant_digit if", "NUM_CHARACTERS_USED/2: if firstDiff > 0: firstDiff -= NUM_CHARACTERS_USED else: firstDiff", "is None: directions = directions_list elif isinstance(eight_point_direction, str): directions =", "code.replace(SEPARATOR, '') if is_padded(code): if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): raise Exception(\"OLC", "included to keep ordered data''' directions_list = [\"NW\", \"N\", \"NE\",", "in which * case the resulting OpenGeoTile will have a", "elif len(eight_point_direction) == 1: ''' North, South, East, or West", "x 0.0025°. The side length of this tile varies with", "math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns neighbor's direction, to", "[] if eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper()) else: ''' this list", "has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise", "= 18 if abs(firstDiff) > NUM_CHARACTERS_USED/2: if firstDiff > 0:", "if not east_exists: east_set.add(east_base) if not south_exists: south_set.add(south_base) if not", "* A wrapper around an {@code OpenLocationCode} object, focusing on", "1° x 1°. The side length of this tile varies", "'0' PADDING_2 = \"00\" PADDING_4 = \"0000\" PADDING_6 = \"000000\"", "distance in tiles etc. * * Open Location Code is", "characters. Example: Address \"CVXW\" corresponds to OLC \"CVXW0000+\" * @return", "string. 
*/''' if self.tile_size == TileSize.GLOBAL: return \"\" else: return", "distance between this and another tile of the same size.", "== -1: raise Exception(\"Character does not exist in alphabet\") return", "for base in BASE_20_SET} class OpenGeoTile(): ''' /** * A", "the location * @param longitude longitude of the location *", "this address * @throws IllegalArgumentException passed through from * {@link", "be a padded code, in which * case the resulting", "Tile addresses will be 10 characters long.''' PINPOINT = (10,", "elif code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT else: raise Exception(\"Too", "x[1] == 'X'} SOUTH_DIGITS = {x for x in BASE_20_BORDER_SET", "the globe, but can be up to approximately 5.5km. Tile", "tileAddress + PADDING_6 + SEPARATOR if len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize", "to reduce by swaping digits ''' all_border_set = memoized_digit_dict.get(f\"A{iterations_needed}\") if", "desired_tile_size\") iterations_needed = int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set = set() east_set", "tile is above -> neighborTile is north ''' direction =", "*/''' return self.tile_address def getTileAddressPrefix(self): '''/** * The prefix of", "= set() if eight_point_direction is None: ''' all borders '''", "IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(double, double, int)} */'''", "in set_of_addresses for base in BASE_20_SET} class OpenGeoTile(): ''' /**", "{@link com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile. * For the plus", "the area of this tile, including cases * where both", "in north_set: east_base = \"\" south_base = \"\" west_base =", "* {@link #getWrappedOpenLocationCode()}, this will return a full plus code", "com.google.openlocationcode.OpenLocationCode}. * @param olc OpenLocationCode for the current location. 
This", "when trying to pass a short (non-full) OLC, or if", "*/''' if not tile_size: tile_size = TileSize.PINPOINT self.code = olc.encode(lat,", "''' all_border_set = memoized_digit_dict.get(f\"A{iterations_needed}\") if not all_border_set: north_base_set = memoized_digit_dict.get(f\"N{iterations_needed}\")", "a * valid {@link com.google.openlocationcode.OpenLocationCode} after removing * '+' and", "for i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2", "+ SEPARATOR + tileAddress[8:10] if detectedTileSize == None: print(tileAddress) raise", "Exception(\"Invalid OpenGeoTile constructor arguments\") if lat and long: self.constructTileFromLatLong(lat, long,", "redundant as python has no wrapping '''/** * The exact", "direction * @throws IllegalArgumentException thrown if otherTile has different {@link", "memoized_digit_dict.get(f\"S{iterations_needed}\", set()) west_set = memoized_digit_dict.get(f\"W{iterations_needed}\", set()) east_exists = east_set !=", "memorability. SEPARATOR = '+' # Copy from OpenLocationCode.java # The", "CODE_ALPHABET} BASE_20_BORDER_SET = {x for x in BASE_20_SET if x[0]", "given one, to calculate a distance in tiles etc. *", "at this location. * @return this tile's address with the", "on the globe, but can be up to approximately 110km.", "one. * @param potentialMember the OpenGeoTile to check * @return", "different size are adjacent if at least one neighbor of", "set() for direction in directions: lat_diff, long_diff = direction_dict.get(direction) '''", "exact plus code wrapped by this OpenGeoTile */''' return self.code", "8 neighboring tiles of the same size. * @return an", "range(quickest_i, iterations_needed): existing_bases = memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\") next_set = {existing_base", "supported. 
Use olc.recoverNearest().\") self.code = plus_code.upper() if is_padded(plus_code): code_length =", "#getWrappedOpenLocationCode()}, this will return a full plus code for the", "construction keeps directions in the order above ''' uppercase_input_directions =", "tile is neighboring this one. * @param potentialNeighbor the OpenGeoTile", "= TileSize.NEIGHBORHOOD olcBuilder += tileAddress + SEPARATOR if len(tileAddress) ==", "to take care of wrapping - basically, //if it's shorter", "redundant '''/** * The full {@link com.google.openlocationcode.OpenLocationCode} for this tile.", "+ (\"0\" * (full_length - len(code_address))) if desired_tile_size == TileSize.PINPOINT:", "self elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise", "under the Apache License 2.0. * For more information, see", "* @version 0.1.0 */ Ported by scoofy on 08.31.21 '''", "least one neighbor of the smaller tile, //but not the", "else: direction = direction + 'S' else: if CODE_ALPHABET.find(self_tile_x) <", "com.google.openlocationcode.OpenLocationCode} after padding with an appropriate * number of '0'", "+ (delta * long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if", "this one. * @param potentialSameTile the OpenGeoTile to check *", "\"W\": [ 0, -1], \"E\": [ 0, +1], \"SW\": [-1,", "one neighbor of the smaller tile, //but not the smaller", "ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_y) == 0: ''' other", "recover().\") modified_plus_code = return_code_of_tile_size(plus_code, tile_size) self.code = modified_plus_code.upper() self.tile_size =", "= \"\" south_base = \"\" west_base = \"\" base_tuple_list =", "An area of 1° x 1°. 
The side length of", "@return true if this and potentialNeighbor are adjacent (8-neighborhood); *", "tile varies with its location on the globe, but can", "corresponding to the number of tiles of the given size", "+ PADDING_4 + SEPARATOR if len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize =", "the resulting OpenGeoTile will have a larger TileSize. * @throws", "big or far away tiles, so this should only be", "com.google.openlocationcode.OpenLocationCode} for this tile. Other than * {@link #getWrappedOpenLocationCode()}, this", "neighborTile.getTileAddress()[-1] direction = \"\" north_south = None if self_tile_x !=", "and long)): raise Exception(\"Invalid OpenGeoTile constructor arguments\") if lat and", "iterations_needed): existing_bases = memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\") next_set = {existing_base +", "double, int)} */''' if not tile_size: tile_size = TileSize.PINPOINT self.code", "is_tile_address(code): code = code.replace(SEPARATOR, '') if is_padded(code): if code.find(PADDING_CHARACTER) <", "Example: Address \"CVXW\" corresponds to OLC \"CVXW0000+\" * @return the", "which corresponds to a valid * {@link com.google.openlocationcode.OpenLocationCode} after padding", "clips and normalizes, //so we don't have to deal with", "to approximately 14m. Tile addresses will be 10 characters long.'''", "this one due to clipping near the poles''' neighbors.add(new_OpenGeoTile) return", "lat and long: self.constructTileFromLatLong(lat, long, tile_size) elif code and tile_size:", "*= 20 c1 = self.getTileAddress()[i*2 + 1] c2 = otherTile.getTileAddress()[i*2", "characters long.''' GLOBAL = (2, 20.0) ''' An area of", "self.code = olcBuilder.upper() def getWrappedOpenLocationCode(self): # this code is effectively", "a full plus code for the whole tile. 
* @return", "* lat_diff) neighborLongitude = longitude + (delta * long_diff) new_OpenGeoTile", "-1, 0] #long_diff = [-1, 0, +1, +1, +1, 0,", "\"CVXW\" corresponds to OLC \"CVXW0000+\" * @return the tile address", "SEPARATOR return code def return_set_of_subaddresses(set_of_addresses): for address in set_of_addresses: if", "the plus code of the whole tile, see {@link #getTileOpenLocationCode()}.", "@param otherTile another tile of the same size as this", "already minimum possible size ''' return None return {address+base for", "to determine all neighboring tiles of a given one, to", "= \"000000\" CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET = {x+y for x", "a plus code for the whole tile, probably padded with", "corresponds to a valid * {@link com.google.openlocationcode.OpenLocationCode} after padding with", "longitude = code_area.longitudeCenter '''directions_list included to keep ordered data''' directions_list", "after padding with an appropriate * number of '0' and", "address of the next biggest tile at this location. *", "* @return an angle in radians, 0 being an eastward", "code=None, tile_size=None, lat=None, long=None, ): if not (code or (code", "olc OpenLocationCode for the current location @param tile_size tile size", "typically 8 neighboring tiles of the same size. 
* @return", "be traversed getting from one to the other tile *", "i break for i in range(quickest_i, iterations_needed): existing_bases = memoized_digit_dict.get(f\"{eight_point_direction}{i", "in BASE_20_SET} class OpenGeoTile(): ''' /** * A wrapper around", "[\"NW\", \"N\", \"NE\", \"E\", \"SE\", \"S\", \"SW\", \"W\"] direction_dict =", "tile is above the other ''' if CODE_ALPHABET.find(self_tile_y) in [0,", "= 18 #; //360°/20° = 18 if abs(firstDiff) > NUM_CHARACTERS_USED/2:", "if code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL elif code_length ==", "code = code.replace(SEPARATOR, '') if is_padded(code): if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength():", "20° x 20°. The side length of this tile varies", "NORTH_DIGITS, \"E1\": EAST_DIGITS, \"S1\": SOUTH_DIGITS, \"W1\": WEST_DIGITS, } def is_padded(plus_code):", "if is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER) else: code_length = min(len(plus_code)-1, 10)", "2 characters long.''' GLOBAL = (2, 20.0) ''' An area", "if not east_exists: east_base += relevant_digit + \"X\" if not", "to assist in expanding tile areas ''' if not self.isNeighbor(neighborTile):", "self.tile_size.getCodeLength()/2) north_set = set() east_set = set() south_set = set()", "area of this tile, including cases * where both are", "all_border_set = memoized_digit_dict.get(f\"A{iterations_needed}\") if not all_border_set: north_base_set = memoized_digit_dict.get(f\"N{iterations_needed}\") if", "\"\" if len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL olcBuilder +=", "olcBuilder += tileAddress + SEPARATOR if len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize", "{@link TileSize} of this OpenGeoTile. 
* @return the {@link TileSize}", "{@link com.google.openlocationcode.OpenLocationCode} after removing * '+' and an additional number", "def getCodeLength(self): '''get 0th value''' return self.code_length def getCoordinateIncrement(self): '''get", "be 10 characters long.''' PINPOINT = (10, 0.000125) def __init__(self,", "@return the {@link TileSize} of this OpenGeoTile */''' return self.tile_size", "\"S\", \"SW\", \"W\"] direction_dict = { \"NW\": [+1, -1], \"N\":", "for this tile. Other than * {@link #getWrappedOpenLocationCode()}, this will", "OpenLocationCode.java # A separator used to break the code into", "later\") def constructTileFromCodeAndSize(self, plus_code, tile_size): ''' Creates a new OpenGeoTile", "traveling salesman problem ''' ''' let's do it once, and", "= latitude + (delta * lat_diff) neighborLongitude = longitude +", "once, and try to reduce by swaping digits ''' all_border_set", "(delta * lat_diff) neighborLongitude = longitude + (delta * long_diff)", "long: float, tile_size=None): '''/** * Creates a new OpenGeoTile from", "* @param potentialNeighbor the OpenGeoTile to check * @return true", "two parts to aid memorability. 
SEPARATOR = '+' # Copy", "block) distance between this and another tile of the same", "0, -1, -1] if not type(eight_point_direction) in [type(None), list, str]:", "[+1, -1], \"N\": [+1, 0], \"NE\": [+1, +1], \"W\": [", "direction + 'N' else: ''' other tile is below ->", "for this OpenGeoTile @throws IllegalArgumentException when trying to pass a", "'''/** * A tile address is a string of length", "if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): ''' tile is desired size '''", "''' ''' traveling salesman problem ''' ''' let's do it", "8 neighboring tiles of the same size; * may return", "this later\") def constructTileFromCodeAndSize(self, plus_code, tile_size): ''' Creates a new", "olcBuilder += tileAddress + PADDING_6 + SEPARATOR if len(tileAddress) ==", "memoized_digit_dict[f\"E{iterations_needed}\"] = east_set memoized_digit_dict[f\"S{iterations_needed}\"] = south_set memoized_digit_dict[f\"W{iterations_needed}\"] = west_set all_border_set", "from OpenLocationCode.java # The character used to pad codes. 
PADDING_CHARACTER", "0: ''' other tile is right -> neighborTile is east", "getting from one to the other tile * @throws IllegalArgumentException", "The side length of this tile varies with its location", "enum import Enum import math, re class TileSize(Enum): ''' An", "''' one tile is above the other ''' if CODE_ALPHABET.find(self_tile_y)", "of a tile address is the address of the next", "range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2 + 1] c2", "right -> neighborTile is east ''' direction = direction +", "firstDiff = self.characterDistance(c1, c2) NUM_CHARACTERS_USED = 18 #; //360°/20° =", "at least one neighbor of the smaller tile, //but not", "is_padded(code): if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): raise Exception(\"OLC padding larger than", "with invalid lat/long values directly''' neighborLatitude = latitude + (delta", "return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code = too_precise_plus_code if not is_tile_address(code): code =", "we need to take care of wrapping - basically, //if", "direction def getCharacterIndex(self, c): '''//following definitions copied from OpenLocationCode.java''' index", "locations are in the same or adjacent * \"tiles\", to", "desired_tile_size\") iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()]) for", "very rough approximation and used as such. 
* @param otherTile", "0th value''' return self.code_length def getCoordinateIncrement(self): '''get 1th value''' return", "{address+base for address in set_of_addresses for base in BASE_20_SET} class", "= \"\" if len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL olcBuilder", "is redundant '''/** * The full {@link com.google.openlocationcode.OpenLocationCode} for this", "addresses are the same; false if not */''' if potentialSameTile.getTileSize()", "\"NW\": [+1, -1], \"N\": [+1, 0], \"NE\": [+1, +1], \"W\":", "raise Exception(\"OLC padding larger than allowed by desired_tile_size\") iterations_needed =", "of this OpenGeoTile. * @return the {@link TileSize} of this", "OpenLocationCode for the current location @param tile_size tile size to", "for neighbor in neighbors: if potentialNeighbor.isSameTile(neighbor): return True return False", "new OpenGeoTile from a tile address. * @param tileAddress a", "existing_bases for base in memoized_digit_dict.get(f\"{eight_point_direction}1\")} memoized_digit_dict[f\"{eight_point_direction}{i + 2}\"] = next_set", "python has no wrapping '''/** * The exact {@link com.google.openlocationcode.OpenLocationCode}", "'''//don't add tiles that are the same as this one", "north_set = memoized_digit_dict.get(f\"N{iterations_needed}\") east_set = memoized_digit_dict.get(f\"E{iterations_needed}\", set()) south_set = memoized_digit_dict.get(f\"S{iterations_needed}\",", "is west ''' direction = direction + 'W' else: if", "tile size is * determined by the length of this", "desired_tile_size): code = too_precise_plus_code if not is_tile_address(code): code = code.replace(SEPARATOR,", "delta = self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code) latitude = code_area.latitudeCenter longitude", "20 c1 = self.getTileAddress()[i*2 + 1] c2 = otherTile.getTileAddress()[i*2 +", "float, tile_size=None): '''/** * Creates a new OpenGeoTile from lat/long", "return a full plus code for the whole 
tile. *", "= set() west_set = set() if isinstance(eight_point_direction, str): eight_point_direction =", "full OLC supported. Use olc.recoverNearest().\") self.code = plus_code.upper() if is_padded(plus_code):", "self.tile_size = TileSize.NEIGHBORHOOD elif code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT", "< desired_tile_size.getCodeLength(): raise Exception(\"OLC padding larger than allowed by desired_tile_size\")", "+/- PI being westward direction * @throws IllegalArgumentException thrown if", "in directions_list if direction in uppercase_input_directions] neighbors = set() for", "a short (non-full) OLC, or if OLC has too much", "- self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()]) for i in range(int(iterations_needed)): address_set", "up to approximately 2200km. Tile addresses will be 2 characters", "@version 0.1.0 */ Ported by scoofy on 08.31.21 ''' def", "length 2, 4, 6, 8, or 10, which corresponds to", "getTileSize(self): '''/** * Get the {@link TileSize} of this OpenGeoTile.", "plus_code.find(PADDING_CHARACTER) else: code_length = min(len(plus_code)-1, 10) if code_length == TileSize.GLOBAL.getCodeLength():", "\"000000\" CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET = {x+y for x in", "a very rough approximation and used as such. 
* @param", "is north ''' direction = direction + 'N' else: '''", "def characterDistance(self, c1, c2): return self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def getLatitudinalTileDistance(self,", "-1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code = too_precise_plus_code if not is_tile_address(code):", "same as this one due to clipping near the poles'''", "as a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile): '''/** *", "* Calculates the Chebyshev (chessboard) distance between this and another", "set() if isinstance(eight_point_direction, str): eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses = set()", "'W' else: if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): ''' other tile is", "below -> neighborTile is south ''' direction = direction +", "eight_point_direction, iterations_needed): base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: quickest_i =", "''' base_set = memoized_digit_dict.get(f\"{eight_point_direction}{iterations_needed}\") if not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set", "'2'} WEST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1]", "location on the globe, but can be up to approximately", "@return a plus code for the whole tile, probably padded", "neighborLongitude = longitude + (delta * long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude,", "can * have a large margin of error, especially for", "WEST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] ==", "(6, 0.05) ''' An area of 0.0025° x 0.0025°. The", "an appropriate * number of '0' and '+' characters. 
Example:", "Location Code is a technology developed by Google and licensed", "north ''' direction = direction + 'N' else: direction =", "= self.getTileAddress()[i*2 + 1] c2 = otherTile.getTileAddress()[i*2 + 1] if", "''' other tile is right -> neighborTile is east '''", "otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\") numIterations =", "as this one due to clipping near the poles''' neighbors.add(new_OpenGeoTile)", "= desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()]) for i in", "is a [2/4/6/8/10]-character string that corresponds to a * valid", "eight_point_direction=None): address = self.getTileAddress() if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address", "for address in set_of_addresses: if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address", "0 being an eastward direction, +/- PI being westward direction", "'''//tiles of different size are adjacent if at least one", "| west_set memoized_digit_dict[f\"A{iterations_needed}\"] = all_border_set return {OpenGeoTile(address+base) for base in", "raise Exception(\"Only full OLC supported. Use recover().\") modified_plus_code = return_code_of_tile_size(plus_code,", "location * @param tile_size tile size to use for this", "above -> neighborTile is north ''' direction = direction +", "separator used to break the code into two parts to", "['2', 'X']} NORTH_DIGITS = {x for x in BASE_20_BORDER_SET if", "desired_tile_size\") code_address = code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength() code = code_address", "0.1.0 */ Ported by scoofy on 08.31.21 ''' def __init__(self,", "OpenGeoTile */''' return self.tile_size def getTileAddress(self): '''/** * A tile", "NW, NE, SW, SE... should return only one tile''' ordinal_digit_dict", "lat/long coordinates. 
* @param latitude latitude of the location *", "= code_length self.coordinate_increment = coordinate_increment def getCodeLength(self): '''get 0th value'''", "absolute_value_bool: return abs(tileDistance) return tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength()", "return plus_code.find(SEPARATOR) == -1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code = too_precise_plus_code", "constructTileFromLatLong(self, lat: float, long: float, tile_size=None): '''/** * Creates a", "of the given OpenLocationCode. * * Using this wrapper class", "6, 8, or 10, which corresponds to a valid *", "west_set.add(west_base) memoized_digit_dict[f\"E{iterations_needed}\"] = east_set memoized_digit_dict[f\"S{iterations_needed}\"] = south_set memoized_digit_dict[f\"W{iterations_needed}\"] = west_set", "if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder += tileAddress", "self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address + base) for", "is above -> neighborTile is north ''' direction = direction", "tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address = self.getTileAddress() if len(address)", "passed through from * {@link OpenLocationCode#OpenLocationCode(double, double, int)} */''' if", "= direction + 'S' else: if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): '''", "potentialNeighbor biggerTile = self else: smallerTile = self biggerTile =", "raise Exception(\"Tile sizes don't match\") return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True))", "TileSize.GLOBAL elif code_length == TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION elif code_length", "check * @return 
true if tile sizes and addresses are", "{@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes", "basically, //if it's shorter to go the other way around,", "= [] if eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper()) else: ''' this", "self.tile_size = tile_size def constructTileFromLatLong(self, lat: float, long: float, tile_size=None):", "potentialSameTile): '''/** * Check if a tile describes the same", "elif code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD elif code_length ==", "NORTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] ==", "True) def getChebyshevTileDistanceTo(self, otherTile): '''/** * Calculates the Chebyshev (chessboard)", "if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): ''' other tile is right ->", "code is effectively redundant as python has no wrapping '''/**", "south_exists: south_set.add(south_base) if not west_exists: west_set.add(west_base) memoized_digit_dict[f\"E{iterations_needed}\"] = east_set memoized_digit_dict[f\"S{iterations_needed}\"]", "Exception(\"Invalid tile address\") self.tile_size = detectedTileSize self.code = olcBuilder.upper() def", "not self.isSameTile(new_OpenGeoTile): '''//don't add tiles that are the same as", "tile address. 
* @param tileAddress a tile address is a", "(non-full) OLC, or if OLC has too much padding for", "code_length = plus_code.find(PADDING_CHARACTER) else: code_length = min(len(plus_code)-1, 10) if code_length", "neighbors = self.getNeighbors() for neighbor in neighbors: if potentialNeighbor.isSameTile(neighbor): return", "getTileAddressPrefix(self): '''/** * The prefix of a tile address is", "def getDirection(self, otherTile): '''/** * Returns the approximate direction of", "= west_set != set() for base in north_set: east_base =", "above the other ''' if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and", "return None elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big'", "+ tileAddress[8:10] if detectedTileSize == None: print(tileAddress) raise Exception(\"Invalid tile", "exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile. * For the", "above ''' uppercase_input_directions = [d.upper() for d in eight_point_direction] directions", "Exception(\"Tile sizes don't match\") return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True)) def", "= memoized_digit_dict.get(f\"A{iterations_needed}\") if not all_border_set: north_base_set = memoized_digit_dict.get(f\"N{iterations_needed}\") if not", "(full_length - len(code_address))) if desired_tile_size == TileSize.PINPOINT: code = code[:-2]", "[-1, -1], \"S\": [-1, 0], \"SE\": [-1, +1], } #lat_diff", "1°. 
The side length of this tile varies with its", "= self.getNeighbors() for neighbor in neighbors: if potentialNeighbor.isSameTile(neighbor): return True", "raise Exception(\"Tile sizes don't match\") numIterations = self.tile_size.getCodeLength()/2 #; //1..5", "= {OpenGeoTile(address) for address in address_set} return tile_set def returnSetOfBorderSubtiles(self,", "n_tuple[1] if not east_exists: east_base += relevant_digit + \"X\" if", "ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address + base)} def memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set", "+ base for existing_base in existing_bases for base in memoized_digit_dict.get(f\"{eight_point_direction}1\")}", "*/ Ported by scoofy on 08.31.21 ''' def __init__(self, code=None,", "east_set | south_set | west_set memoized_digit_dict[f\"A{iterations_needed}\"] = all_border_set return {OpenGeoTile(address+base)", "not olc.isFull(plus_code): raise Exception(\"Only full OLC supported. Use recover().\") modified_plus_code", "possible size ''' return None return {address+base for address in", "not olc.isFull(plus_code): raise Exception(\"Only full OLC supported. Use olc.recoverNearest().\") self.code", "memoized_digit_dict.get(f\"E{iterations_needed}\", set()) south_set = memoized_digit_dict.get(f\"S{iterations_needed}\", set()) west_set = memoized_digit_dict.get(f\"W{iterations_needed}\", set())", "[direction for direction in directions_list if direction in uppercase_input_directions] neighbors", "return value can * have a large margin of error,", "direction + 'W' else: if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): ''' other", "OpenGeoTile will have a larger TileSize. * @throws IllegalArgumentException if", "in directions: lat_diff, long_diff = direction_dict.get(direction) ''' //OLC constructor clips", "tile contains another one. 
* @param potentialMember the OpenGeoTile to", "code[-2:] else: code = code[:-2] + SEPARATOR return code def", "-> neighborTile is east ''' direction = direction + 'E'", "the other way around, do so''' firstDiff = self.characterDistance(c1, c2)", "* '+' and an additional number of trailing characters; tile", "other tile * @throws IllegalArgumentException thrown if otherTile has different", "do it once, and try to reduce by swaping digits", "that need to * be traversed getting from one to", "interpreted as a very rough approximation and used as such.", "def returnCode(self): return self.code def getTileSize(self): '''/** * Get the", "as olc from enum import Enum import math, re class", "if not */''' if potentialSameTile.getTileSize() != self.getTileSize(): return False return", "SEPARATOR = '+' # Copy from OpenLocationCode.java # The character", "def getCoordinateIncrement(self): '''get 1th value''' return self.coordinate_increment # Copy from", "\"CVXW0000+\" * @return the tile address of this OpenGeoTile; */'''", "determine whether two locations are in the same or adjacent", "other tile is left -> neighborTile is west ''' direction", "from an existing {@link com.google.openlocationcode.OpenLocationCode}. 
@param olc OpenLocationCode for the", "''' def __init__(self, code=None, tile_size=None, lat=None, long=None, ): if not", "i == 0: '''//for the first longitudinal value, we need", "TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL olcBuilder += tileAddress + PADDING_6 +", "around an {@code OpenLocationCode} object, focusing on the area identified", "* @return true if this and potentialNeighbor are adjacent (8-neighborhood);", "\"\" north_south = None if self_tile_x != other_tile_x: ''' one", "TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL elif code_length == TileSize.REGION.getCodeLength(): self.tile_size =", "of tiles of the given size that need to *", "(code or (code and tile_size) or (lat and long)): raise", "east_set != set() south_exists = south_set != set() west_exists =", "eight_point_direction.upper() set_of_border_subaddresses = set() if eight_point_direction is None: ''' all", "north_base_set = memoized_digit_dict.get(f\"N{iterations_needed}\") if not north_base_set: self.memoizeDigitDict(\"N\", iterations_needed) north_set =", "Get an array of the typically 8 neighboring tiles of", "invalid length */''' detectedTileSize = None olcBuilder = \"\" if", "tile sizes and addresses are the same; false if not", "def getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns neighbor's direction, to assist in", "= west_set all_border_set = north_set | east_set | south_set |", "of error, especially for big or far away tiles, so", "memoized_digit_dict.get(f\"A{iterations_needed}\") if not all_border_set: north_base_set = memoized_digit_dict.get(f\"N{iterations_needed}\") if not north_base_set:", "same size as this one * @return an integer value", "address_set} return tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address = self.getTileAddress()", "west_base = \"\" base_tuple_list = re.findall('..', base) ''' north will", "[0, len(CODE_ALPHABET)-1]: ''' 
ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_y) ==", "additional number of trailing characters; tile size is * determined", "[-1, +1], } #lat_diff = [+1, +1, +1, 0, -1,", "\"23456789CFGHJMPQRVWX\".find(c.upper()) if index == -1: raise Exception(\"Character does not exist", "used to break the code into two parts to aid", "base) ''' north will be Xd east dX south 2d", "* * Using this wrapper class allows to determine whether", "potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile = potentialNeighbor biggerTile = self else:", "* @return true if tile sizes and addresses are the", "given size that need to * be traversed getting from", "2: ''' NW, NE, SW, SE... should return only one", "'SE': '2X', 'SW': '22' } base = '' for i", "return self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize()", "other ''' if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in", "4, 6, 8, or 10, which corresponds to a valid", "neighbor's direction, to assist in expanding tile areas ''' if", "!= set() for base in north_set: east_base = \"\" south_base", "* {@link com.google.openlocationcode.OpenLocationCode}. * @param olc OpenLocationCode for the current", "whether two locations are in the same or adjacent *", "= 0 for i in range(int(numIterations)): tileDistance *= 20 c1", "tiles of the given size that need to * be", "address with the final two characters removed. In case of", "An area of 20° x 20°. The side length of", "[2/4/6/8/10]-character string that corresponds to a * valid {@link com.google.openlocationcode.OpenLocationCode}", "tile describes the same area as this one. 
* @param", "str\") if eight_point_direction is None: directions = directions_list elif isinstance(eight_point_direction,", "code_length self.coordinate_increment = coordinate_increment def getCodeLength(self): '''get 0th value''' return", "return self.code_length def getCoordinateIncrement(self): '''get 1th value''' return self.coordinate_increment #", "size that need to * be traversed getting from one", "after removing * '+' and an additional number of trailing", "Exception(\"neighborTile must be neighbor\") if neighborTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile", "location. This can be a padded code, in which *", "Enum import math, re class TileSize(Enum): ''' An area of", "'+' and an additional number of trailing characters; tile size", "be up to approximately 275m. Tile addresses will be 8", "on the globe, but can be up to approximately 14m.", "Copy from OpenLocationCode.java # The character used to pad codes.", "self.getNeighbors() for neighbor in neighbors: if potentialNeighbor.isSameTile(neighbor): return True return", "if lat and long: self.constructTileFromLatLong(lat, long, tile_size) elif code and", "too much padding for given tile_size ''' if not olc.isFull(plus_code):", "same; false if not */''' # //if A contains B,", "'NE': 'XX', 'SE': '2X', 'SW': '22' } base = ''", "normalizes, //so we don't have to deal with invalid lat/long", "smaller tile, //but not the smaller tile itself, is contained", "= [\"NW\", \"N\", \"NE\", \"E\", \"SE\", \"S\", \"SW\", \"W\"] direction_dict", "from * {@link OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress is of", "== TileSize.PINPOINT: code = code[:-2] + SEPARATOR + code[-2:] else:", "IllegalArgumentException if olc is not a full code */''' if", "return tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address = self.getTileAddress() if", "* Calculates the Manhattan (city block) distance between this and", "= 
self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2) if absolute_value_bool:", "will return a full plus code for the whole tile.", "* Open Location Code is a technology developed by Google", "and addresses are the same; false if not */''' if", "other tile is below -> neighborTile is south ''' direction", "else: smallerTile = self biggerTile = potentialNeighbor if biggerTile.contains(smallerTile): return", "= None if self_tile_x != other_tile_x: ''' one tile is", "[+1, +1, +1, 0, -1, -1, -1, 0] #long_diff =", "is north ''' direction = direction + 'N' else: direction", "''' direction = direction + 'E' else: ''' other tile", "south_set.add(south_base) if not west_exists: west_set.add(west_base) memoized_digit_dict[f\"E{iterations_needed}\"] = east_set memoized_digit_dict[f\"S{iterations_needed}\"] =", "getManhattanTileDistanceTo(self, otherTile): '''/** * Calculates the Manhattan (city block) distance", "self.getTileAddress()[i*2 + 1] c2 = otherTile.getTileAddress()[i*2 + 1] if i", "if i == 0: '''//for the first longitudinal value, we", "'desired_tile_size is too big' raise Exception(\"OLC padding larger than allowed", "TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT olcBuilder += tileAddress[0:8] + SEPARATOR +", "in base_tuple_list: relevant_digit = n_tuple[1] if not east_exists: east_base +=", "PI being westward direction * @throws IllegalArgumentException thrown if otherTile", "None: ''' all borders ''' ''' traveling salesman problem '''", "@throws IllegalArgumentException if olc is not a full code */'''", "getTileOpenLocationCode(self): # this code is redundant '''/** * The full", "x[0] in ['2', 'X'] or x[1] in ['2', 'X']} NORTH_DIGITS", "tiles near the poles. */''' # deltas = [20.0, 1.0,", "another tile of the same size. 
* @param otherTile another", "two locations are in the same or adjacent * \"tiles\",", "match\") return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self, otherTile):", "self.getTileSize(): raise Exception(\"Tile sizes don't match\") self_tile_x = self.getTileAddress()[-2] self_tile_y", "biggerTile = potentialNeighbor if biggerTile.contains(smallerTile): return False neighbors = smallerTile.getNeighbors()", "in eight_point_direction] directions = [direction for direction in directions_list if", "other_tile_y = neighborTile.getTileAddress()[-1] direction = \"\" north_south = None if", "of the same size; * may return less than 8", "minimum possible size ''' return None elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength():", "has no wrapping '''/** * The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped", "= {x for x in BASE_20_BORDER_SET if x[1] == 'X'}", "in ['2', 'X']} NORTH_DIGITS = {x for x in BASE_20_BORDER_SET", "an array of the typically 8 neighboring tiles of the", "and tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif code: if is_tile_address(code): self.constructTileFromTileAddress(code) else:", "08.31.21 ''' def __init__(self, code=None, tile_size=None, lat=None, long=None, ): if", "+ 'N' else: ''' other tile is below -> neighborTile", "CODE_ALPHABET.find(other_tile_x): ''' other tile is above -> neighborTile is north", "in address_set} return tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address =", "TileSize.GLOBAL: return \"\" else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return", "= return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address) for address in address_set} return", "location. 
* @return this tile's address with the final two", "borders ''' ''' traveling salesman problem ''' ''' let's do", "directions_list: directions.append(eight_point_direction.upper()) else: ''' this list construction keeps directions in", "Chebyshev (chessboard) distance between this and another tile of the", "be * interpreted as a very rough approximation and used", "dX south 2d west d2''' for n_tuple in base_tuple_list: relevant_digit", "true if tile sizes and addresses are the same; false", "parent tiles ''' if CODE_ALPHABET.find(other_tile_y) == 0: ''' other tile", "'''/** * Check if a tile describes the same area", "long.''' REGION = (4, 1.0) ''' An area of 0.05°", "not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address +", "self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception(\"OLC padding", "* Check if a tile describes the same area as", "return self.getTileAddressPrefix() def getTileOpenLocationCode(self): # this code is redundant '''/**", "+ self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self, otherTile): '''/** * Calculates the", "''' if CODE_ALPHABET.find(other_tile_x) == 0: ''' other tile is above", "return tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): '''", "in range(quickest_i, iterations_needed): existing_bases = memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\") next_set =", "GLOBAL tile, * returns the empty string. 
*/''' if self.tile_size", "# this code is effectively redundant as python has no", "eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper()) else: ''' this list construction keeps", "else: ''' other tile is left -> neighborTile is west", "= None olcBuilder = \"\" if len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize", "south ''' direction = direction + 'S' if self_tile_y !=", "if tileAddress is of * invalid length */''' detectedTileSize =", "not (code or (code and tile_size) or (lat and long)):", "def getTileAddress(self): '''/** * A tile address is a string", "== 'X'} EAST_DIGITS = {x for x in BASE_20_BORDER_SET if", "''' if not self.isNeighbor(neighborTile): raise Exception(\"neighborTile must be neighbor\") if", "self.tile_size.getCodeLength()/2 #; //1..5 tileDistance = 0 for i in range(int(numIterations)):", "'''get 1th value''' return self.coordinate_increment # Copy from OpenLocationCode.java #", "in reversed(range(iterations_needed)): if memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\"): quickest_i = i break", "tile''' if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile = potentialNeighbor biggerTile =", "SEPARATOR if len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION olcBuilder +=", "the tile address of this OpenGeoTile; */''' return self.tile_address def", "* The prefix of a tile address is the address", "-1, -1, 0] #long_diff = [-1, 0, +1, +1, +1,", "current location @param tile_size tile size to use for this", "this tile varies with its location on the globe, but", "'SW': '22' } base = '' for i in range(iterations_needed):", "'X']} NORTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0]", "= all_border_set return {OpenGeoTile(address+base) for base in all_border_set} elif len(eight_point_direction)", "near the poles. 
*/''' # deltas = [20.0, 1.0, 0.05,", "OpenGeoTile(): ''' /** * A wrapper around an {@code OpenLocationCode}", "self.getTileSize(): raise Exception(\"Tile sizes don't match\") return self.getLatitudinalTileDistance(otherTile, True) +", "addresses will be 10 characters long.''' PINPOINT = (10, 0.000125)", "less than 8 neighbors for tiles near the poles. */'''", "from enum import Enum import math, re class TileSize(Enum): '''", "characterDistance(self, c1, c2): return self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile,", "if biggerTile.contains(smallerTile): return False neighbors = smallerTile.getNeighbors() for neighbor in", "''' address already minimum possible size ''' return None return", "> self.tile_size.getCodeLength(): smallerTile = potentialNeighbor biggerTile = self else: smallerTile", "code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL elif code_length == TileSize.REGION.getCodeLength():", "self.tile_size = TileSize.DISTRICT elif code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD", "long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile): '''//don't", "code_address + (\"0\" * (full_length - len(code_address))) if desired_tile_size ==", "\"NE\": [+1, +1], \"W\": [ 0, -1], \"E\": [ 0,", "print(tileAddress) raise Exception(\"Invalid tile address\") self.tile_size = detectedTileSize self.code =", "tileDistance *= 20 c1 = self.getTileAddress()[i*2 + 1] c2 =", "+ (delta * lat_diff) neighborLongitude = longitude + (delta *", "final two characters removed. 
In case of a GLOBAL tile,", "- len(code_address))) if desired_tile_size == TileSize.PINPOINT: code = code[:-2] +", "+ \"X\" if not south_exists: south_base += \"2\" + relevant_digit", "raise Exception(\"Tile sizes don't match\") xDiff = int(self.getLongitudinalTileDistance(otherTile, False)) yDiff", "from OpenLocationCode.java # A separator used to break the code", "directions: lat_diff, long_diff = direction_dict.get(direction) ''' //OLC constructor clips and", "if absolute_value_bool: return abs(tileDistance) return tileDistance def getLongitudinalTileDistance(self, otherTile, absolute_value_bool):", "but can be up to approximately 275m. Tile addresses will", "olcBuilder += tileAddress[0:8] + SEPARATOR + tileAddress[8:10] if detectedTileSize ==", "a prefix * of the given OpenLocationCode. * * Using", "def getManhattanTileDistanceTo(self, otherTile): '''/** * Calculates the Manhattan (city block)", "* @param longitude longitude of the location * @param tile_size", "* @return the {@link TileSize} of this OpenGeoTile */''' return", "this code is effectively redundant as python has no wrapping", "= 0 for i in reversed(range(iterations_needed)): if memoized_digit_dict.get(f\"{eight_point_direction}{i + 1}\"):", "8, or 10, which corresponds to a valid * {@link", "else: if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): ''' other tile is above", "1}\"): quickest_i = i break for i in range(quickest_i, iterations_needed):", "def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address = self.getTileAddress() if len(address) ==", "otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't", "that corresponds to a * valid {@link com.google.openlocationcode.OpenLocationCode} after removing", "#long_diff = [-1, 0, +1, +1, +1, 0, -1, -1]", "not a full code */''' if not olc.isFull(plus_code): raise Exception(\"Only", "set() west_set = set() if 
isinstance(eight_point_direction, str): eight_point_direction = eight_point_direction.upper()", "location @param tile_size tile size to use for this OpenGeoTile", "BASE_20_SET if x[0] in ['2', 'X'] or x[1] in ['2',", "d in eight_point_direction] directions = [direction for direction in directions_list", "len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_y) == 0:", "longitude of the location * @param tile_size tile size to", "== 0: ''' other tile is above -> neighborTile is", "of the typically 8 neighboring tiles of the same size.", "don't match\") numIterations = self.tile_size.getCodeLength()/2 #1..5 tileDistance = 0 for", "area of 1° x 1°. The side length of this", "tileAddress is of * invalid length */''' detectedTileSize = None", "Use olc.recoverNearest().\") self.code = plus_code.upper() if is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER)", "with its location on the globe, but can be up", "in CODE_ALPHABET for y in CODE_ALPHABET} BASE_20_BORDER_SET = {x for", "one. * @param potentialSameTile the OpenGeoTile to check * @return", "''' this list construction keeps directions in the order above", "!= self.getTileSize(): raise Exception(\"Tile sizes don't match\") self_tile_x = self.getTileAddress()[-2]", "= modified_plus_code.upper() self.tile_size = tile_size def constructTileFromLatLong(self, lat: float, long:", "of 1° x 1°. 
The side length of this tile", "Exception(\"Tile sizes don't match\") numIterations = self.tile_size.getCodeLength()/2 #; //1..5 tileDistance", "In case of a GLOBAL tile, * returns the empty", "is east ''' direction = direction + 'E' else: '''", "+= tileAddress + PADDING_4 + SEPARATOR if len(tileAddress) == TileSize.DISTRICT.getCodeLength():", "sizes don't match\") return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self,", "detectedTileSize = TileSize.PINPOINT olcBuilder += tileAddress[0:8] + SEPARATOR + tileAddress[8:10]", "can be up to approximately 110km. Tile addresses will be", "IllegalArgumentException when trying to pass a short (non-full) OLC, or", "for big or far away tiles, so this should only", "of wrapping - basically, //if it's shorter to go the", "TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION elif code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size =", "self_tile_y != other_tile_y: ''' one tile is above the other", "= (4, 1.0) ''' An area of 0.05° x 0.05°.", "or 10, which corresponds to a valid * {@link com.google.openlocationcode.OpenLocationCode}", "} def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) != -1 def is_tile_address(plus_code): return", "self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR, \"\")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code): '''/**", "long_diff = direction_dict.get(direction) ''' //OLC constructor clips and normalizes, //so", "tileDistance def getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise", "neighbors = smallerTile.getNeighbors() for neighbor in neighbors: if biggerTile.contains(neighbor): return", "or adjacent * \"tiles\", to determine all neighboring tiles of", "+ SEPARATOR + code[-2:] else: code = code[:-2] + SEPARATOR", "openlocationcode as olc from enum 
import Enum import math, re", "way around, do so''' firstDiff = self.characterDistance(c1, c2) NUM_CHARACTERS_USED =", "self_tile_x != other_tile_x: ''' one tile is above the other", "plus code of the whole tile, see {@link #getTileOpenLocationCode()}. *", "= code_area.latitudeCenter longitude = code_area.longitudeCenter '''directions_list included to keep ordered", "''' let's do it once, and try to reduce by", "if len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL olcBuilder += tileAddress", "//if A contains B, then B's address has A's address", "code_area = olc.decode(self.code) latitude = code_area.latitudeCenter longitude = code_area.longitudeCenter '''directions_list", "constructTileFromCodeAndSize(self, plus_code, tile_size): ''' Creates a new OpenGeoTile from an", "varies with its location on the globe, but can be", "isSameTile(self, potentialSameTile): '''/** * Check if a tile describes the", "one tile is above the other ''' if CODE_ALPHABET.find(self_tile_x) in", "approximately 14m. Tile addresses will be 10 characters long.''' PINPOINT", "20 c1 = self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2)", "OpenGeoTile from lat/long coordinates. * @param latitude latitude of the", "set() if eight_point_direction is None: ''' all borders ''' '''", "the {@link TileSize} of this OpenGeoTile. * @return the {@link", "no wrapping '''/** * The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by", "location * @param longitude longitude of the location * @param", "(chessboard) distance between this and another tile of the same", "== TileSize.GLOBAL: return \"\" else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self):", "+1], } #lat_diff = [+1, +1, +1, 0, -1, -1,", "= direction + 'W' else: if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): '''", "code into two parts to aid memorability. 
SEPARATOR = '+'", "'X'} SOUTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0]", "as python has no wrapping '''/** * The exact {@link", "= memoized_digit_dict.get(f\"N{iterations_needed}\") if not north_base_set: self.memoizeDigitDict(\"N\", iterations_needed) north_set = memoized_digit_dict.get(f\"N{iterations_needed}\")", "eastward direction, +/- PI being westward direction * @throws IllegalArgumentException", "= { \"N1\": NORTH_DIGITS, \"E1\": EAST_DIGITS, \"S1\": SOUTH_DIGITS, \"W1\": WEST_DIGITS,", "'''/** * The full {@link com.google.openlocationcode.OpenLocationCode} for this tile. Other", "potentialMember): '''/** * Check if this tile contains another one.", "x 0.000125°. The side length of this tile varies with", "for address in set_of_addresses for base in BASE_20_SET} class OpenGeoTile():", "Exception(\"Tile sizes don't match\") xDiff = int(self.getLongitudinalTileDistance(otherTile, False)) yDiff =", "effectively redundant as python has no wrapping '''/** * The", "traversed getting from one to the other tile * @throws", "for this OpenGeoTile * @throws IllegalArgumentException passed through from *", "Tile addresses will be 2 characters long.''' GLOBAL = (2,", "all_border_set return {OpenGeoTile(address+base) for base in all_border_set} elif len(eight_point_direction) ==", "c): '''//following definitions copied from OpenLocationCode.java''' index = \"23456789CFGHJMPQRVWX\".find(c.upper()) if", "base in BASE_20_SET} class OpenGeoTile(): ''' /** * A wrapper", "'''/** * Check if this tile contains another one. 
*", "self.coordinate_increment = coordinate_increment def getCodeLength(self): '''get 0th value''' return self.code_length", "= min(len(plus_code)-1, 10) if code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL", "+ 1] c2 = otherTile.getTileAddress()[i*2 + 1] if i ==", "west_set memoized_digit_dict[f\"A{iterations_needed}\"] = all_border_set return {OpenGeoTile(address+base) for base in all_border_set}", "east_base += relevant_digit + \"X\" if not south_exists: south_base +=", "*= 20 c1 = self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2] tileDistance +=", "Xd east dX south 2d west d2''' for n_tuple in", "this tile's address with the final two characters removed. In", "approximately 2200km. Tile addresses will be 2 characters long.''' GLOBAL", "tile_size.getCodeLength()).upper() self.tile_size = tile_size def constructTileFromTileAddress(self, tileAddress): '''/** * Creates", "@param potentialMember the OpenGeoTile to check * @return true if", "is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR, \"\")[0: self.tile_size.getCodeLength()] def", "if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\") xDiff", "has A's address as a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self,", "8 characters long.''' NEIGHBORHOOD = (8, 0.0025) ''' An area", "on 08.31.21 ''' def __init__(self, code=None, tile_size=None, lat=None, long=None, ):", "'''//for the first longitudinal value, we need to take care", "to use for this OpenGeoTile * @throws IllegalArgumentException passed through", "def isNeighbor(self, potentialNeighbor): '''/** * Check if a tile is", "EAST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] ==", "a tile address is the address of the next biggest", "PINPOINT = (10, 0.000125) def __init__(self, code_length, 
coordinate_increment): self.code_length =", "= [d.upper() for d in eight_point_direction] directions = [direction for", "(delta * long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if not", "= too_precise_plus_code if not is_tile_address(code): code = code.replace(SEPARATOR, '') if", "is a string of length 2, 4, 6, 8, or", "area of 0.05° x 0.05°. The side length of this", "getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes", "if is_padded(code): if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): raise Exception(\"OLC padding larger", "is left -> neighborTile is west ''' direction = direction", "if potentialSameTile.getTileSize() != self.getTileSize(): return False return potentialSameTile.getTileAddress() == self.getTileAddress()", "CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]: '''", "the Manhattan (city block) distance between this and another tile", "detectedTileSize = None olcBuilder = \"\" if len(tileAddress) == TileSize.GLOBAL.getCodeLength():", "= code[:-2] + SEPARATOR + code[-2:] else: code = code[:-2]", "tile * @throws IllegalArgumentException thrown if otherTile has different {@link", "don't match\") xDiff = int(self.getLongitudinalTileDistance(otherTile, False)) yDiff = int(self.getLatitudinalTileDistance(otherTile, False))", "values directly''' neighborLatitude = latitude + (delta * lat_diff) neighborLongitude", "smallerTile = self biggerTile = potentialNeighbor if biggerTile.contains(smallerTile): return False", "prefix of a tile address is the address of the", "NE, SW, SE... 
should return only one tile''' ordinal_digit_dict =", "to keep ordered data''' directions_list = [\"NW\", \"N\", \"NE\", \"E\",", "tile, //but not the smaller tile itself, is contained within", "{existing_base + base for existing_base in existing_bases for base in", "{@link #getTileOpenLocationCode()}. * @return the exact plus code wrapped by", "will be 8 characters long.''' NEIGHBORHOOD = (8, 0.0025) '''", "detectedTileSize = TileSize.DISTRICT olcBuilder += tileAddress + PADDING_2 + SEPARATOR", "expanding tile areas ''' if not self.isNeighbor(neighborTile): raise Exception(\"neighborTile must", "PADDING_2 = \"00\" PADDING_4 = \"0000\" PADDING_6 = \"000000\" CODE_ALPHABET", "be neighbor\") if neighborTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't", "new OpenGeoTile from lat/long coordinates. * @param latitude latitude of", "self.isSameTile(potentialNeighbor): return False neighbors = self.getNeighbors() for neighbor in neighbors:", "to check * @return true if the area potentialMember falls", "'X'} EAST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1]", "len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT olcBuilder += tileAddress[0:8] +", "B's address has A's address as a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress())", "adjacent * \"tiles\", to determine all neighboring tiles of a", "have a larger TileSize. 
* @throws IllegalArgumentException if olc is", "developed by Google and licensed under the Apache License 2.0.", "the whole tile, probably padded with '0' characters */''' return", "address of this OpenGeoTile; */''' return self.tile_address def getTileAddressPrefix(self): '''/**", "0, +1], \"SW\": [-1, -1], \"S\": [-1, 0], \"SE\": [-1,", "in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_x)", "= \"\" north_south = None if self_tile_x != other_tile_x: '''", "if x[0] == 'X'} EAST_DIGITS = {x for x in", "only be * interpreted as a very rough approximation and", "the OpenGeoTile to check * @return true if the area", "tiles of a given one, to calculate a distance in", "OLC supported. Use recover().\") modified_plus_code = return_code_of_tile_size(plus_code, tile_size) self.code =", "direction_dict.get(direction) ''' //OLC constructor clips and normalizes, //so we don't", "* interpreted as a very rough approximation and used as", "set()) south_set = memoized_digit_dict.get(f\"S{iterations_needed}\", set()) west_set = memoized_digit_dict.get(f\"W{iterations_needed}\", set()) east_exists", "tile address is a [2/4/6/8/10]-character string that corresponds to a", "addresses will be 6 characters long.''' DISTRICT = (6, 0.05)", "returns neighbor's direction, to assist in expanding tile areas '''", "relative to this. 
The return value can * have a", "#; //1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance", "= self biggerTile = potentialNeighbor if biggerTile.contains(smallerTile): return False neighbors", "Creates a new OpenGeoTile from an existing * {@link com.google.openlocationcode.OpenLocationCode}.", "== TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL elif code_length == TileSize.REGION.getCodeLength(): self.tile_size", "= directions_list elif isinstance(eight_point_direction, str): directions = [] if eight_point_direction.upper()", "{@link #getWrappedOpenLocationCode()}, this will return a full plus code for", "len(eight_point_direction) == 2: ''' NW, NE, SW, SE... should return", "neighbors for same tile''' if self.isSameTile(potentialNeighbor): return False neighbors =", "code: if is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR, \"\")[0:", "see {@link #getTileOpenLocationCode()}. * @return the exact plus code wrapped", "is not a full code */''' if not olc.isFull(plus_code): raise", "- basically, //if it's shorter to go the other way", "margin of error, especially for big or far away tiles,", "''' return self elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too", "olc.recoverNearest().\") self.code = plus_code.upper() if is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER) else:", "len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL olcBuilder += tileAddress +", "codes. PADDING_CHARACTER = '0' PADDING_2 = \"00\" PADDING_4 = \"0000\"", "code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength() code = code_address + (\"0\" *", "directions_list if direction in uppercase_input_directions] neighbors = set() for direction", "be up to approximately 2200km. 
Tile addresses will be 2", "REGION = (4, 1.0) ''' An area of 0.05° x", "approximate direction of the other tile relative to this. The", "''' other tile is above -> neighborTile is north '''", "self.characterDistance(c1,c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def getLongitudinalTileDistance(self, otherTile,", "+ 1] if i == 0: '''//for the first longitudinal", "None if self_tile_x != other_tile_x: ''' one tile is above", "math, re class TileSize(Enum): ''' An area of 20° x", "east_exists = east_set != set() south_exists = south_set != set()", "\"S\": [-1, 0], \"SE\": [-1, +1], } #lat_diff = [+1,", "of length 2, 4, 6, 8, or 10, which corresponds", "same size. * @param otherTile another tile of the same", "'+' characters. Example: Address \"CVXW\" corresponds to OLC \"CVXW0000+\" *", "otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize():", "an angle in radians, 0 being an eastward direction, +/-", "Get the {@link TileSize} of this OpenGeoTile. 
* @return the", "= OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile): '''//don't add tiles", "must be neighbor\") if neighborTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes", "otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2) if absolute_value_bool: return abs(tileDistance) return tileDistance", "neighborTile is east ''' direction = direction + 'E' else:", "def getNeighbors(self, eight_point_direction=None): '''/** * Get an array of the", "neighbor\") if neighborTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes don't match\")", "that are the same as this one due to clipping", "A contains B, then B's address has A's address as", "{ \"N1\": NORTH_DIGITS, \"E1\": EAST_DIGITS, \"S1\": SOUTH_DIGITS, \"W1\": WEST_DIGITS, }", "clipping near the poles''' neighbors.add(new_OpenGeoTile) return neighbors def isSameTile(self, potentialSameTile):", "\"S1\": SOUTH_DIGITS, \"W1\": WEST_DIGITS, } def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) !=", "but can be up to approximately 110km. Tile addresses will", "information, see https://github.com/google/open-location-code * * @author <NAME> * @version 0.1.0", "for existing_base in existing_bases for base in memoized_digit_dict.get(f\"{eight_point_direction}1\")} memoized_digit_dict[f\"{eight_point_direction}{i +", "if x[1] == 'X'} SOUTH_DIGITS = {x for x in", "direction = direction + 'S' if self_tile_y != other_tile_y: '''", "code_area.longitudeCenter '''directions_list included to keep ordered data''' directions_list = [\"NW\",", "x 20°. 
The side length of this tile varies with", "WEST_DIGITS, } def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) != -1 def is_tile_address(plus_code):", "the address of the next biggest tile at this location.", "south_exists = south_set != set() west_exists = west_set != set()", "olcBuilder += tileAddress + PADDING_4 + SEPARATOR if len(tileAddress) ==", "return {OpenGeoTile(address + base)} def memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set =", "= TileSize.PINPOINT olcBuilder += tileAddress[0:8] + SEPARATOR + tileAddress[8:10] if", "detectedTileSize == None: print(tileAddress) raise Exception(\"Invalid tile address\") self.tile_size =", "desired_tile_size.getCodeLength(): ''' tile is desired size ''' return self elif", "return plus_code.find(PADDING_CHARACTER) != -1 def is_tile_address(plus_code): return plus_code.find(SEPARATOR) == -1", "current location. This can be a padded code, in which", "\"E\": [ 0, +1], \"SW\": [-1, -1], \"S\": [-1, 0],", "neighbors = set() for direction in directions: lat_diff, long_diff =", "from one to the other tile * @throws IllegalArgumentException thrown", "if abs(firstDiff) > NUM_CHARACTERS_USED/2: if firstDiff > 0: firstDiff -=", "area of 20° x 20°. The side length of this", "tileAddress + PADDING_4 + SEPARATOR if len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize", "uppercase_input_directions = [d.upper() for d in eight_point_direction] directions = [direction", "+ 'W' return direction def getCharacterIndex(self, c): '''//following definitions copied", "base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address + base)", "direction = direction + 'S' else: if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x):", "!= other_tile_x: ''' one tile is above the other '''", "the globe, but can be up to approximately 110km. Tile", "0.0025°. 
The side length of this tile varies with its", "self.characterDistance(c1, c2) NUM_CHARACTERS_USED = 18 #; //360°/20° = 18 if", "from * {@link OpenLocationCode#OpenLocationCode(double, double, int)} */''' if not tile_size:", "address already minimum possible size ''' return None return {address+base", "especially for big or far away tiles, so this should", "tiles, so this should only be * interpreted as a", "of the given size that need to * be traversed", "* @return true if the area potentialMember falls within the", "return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile): '''/** * Calculates the Manhattan", "value''' return self.coordinate_increment # Copy from OpenLocationCode.java # A separator", "{x for x in BASE_20_SET if x[0] in ['2', 'X']", "return False neighbors = self.getNeighbors() for neighbor in neighbors: if", "= self else: smallerTile = self biggerTile = potentialNeighbor if", "c1, c2): return self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile, absolute_value_bool):", "if not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address", "(city block) distance between this and another tile of the", "new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile): '''//don't add", "such. * @param otherTile another tile of the same size", "getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile sizes", "def getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception(\"Tile", "but can be up to approximately 5.5km. 
Tile addresses will", "with an appropriate * number of '0' and '+' characters.", "= neighborTile.getTileAddress()[-1] direction = \"\" north_south = None if self_tile_x", "= south_set memoized_digit_dict[f\"W{iterations_needed}\"] = west_set all_border_set = north_set | east_set", "TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION olcBuilder += tileAddress + PADDING_4 +", "for the current location. This can be a padded code,", "18 if abs(firstDiff) > NUM_CHARACTERS_USED/2: if firstDiff > 0: firstDiff", "number of trailing characters; tile size is * determined by", "tileAddress + SEPARATOR if len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT", "'0' characters */''' return self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None): '''/** *", "0] #long_diff = [-1, 0, +1, +1, +1, 0, -1,", "potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile): '''/** * Calculates the Manhattan (city", "Exception(\"Tile sizes don't match\") return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True)", "= \"00\" PADDING_4 = \"0000\" PADDING_6 = \"000000\" CODE_ALPHABET =", "size; * may return less than 8 neighbors for tiles", "= re.findall('..', base) ''' north will be Xd east dX", "SW, SE... should return only one tile''' ordinal_digit_dict = {", "go the other way around, do so''' firstDiff = self.characterDistance(c1,", "for the whole tile. 
* @return a plus code for", "len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles", "trailing characters; tile size is * determined by the length", "than allowed by desired_tile_size\") iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set", "None elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise", "or (lat and long)): raise Exception(\"Invalid OpenGeoTile constructor arguments\") if", "if not (code or (code and tile_size) or (lat and", "and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles '''", "in neighbors: if potentialNeighbor.isSameTile(neighbor): return True return False else: '''//tiles", "in the same or adjacent * \"tiles\", to determine all", "if x[0] in ['2', 'X'] or x[1] in ['2', 'X']}", "list, str]: raise Exception(\"eight_point_direction must be of type list or", "if not all_border_set: north_base_set = memoized_digit_dict.get(f\"N{iterations_needed}\") if not north_base_set: self.memoizeDigitDict(\"N\",", "The character used to pad codes. PADDING_CHARACTER = '0' PADDING_2", "20.0) ''' An area of 1° x 1°. 
The side", "returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): ''' tile is desired", "for direction in directions: lat_diff, long_diff = direction_dict.get(direction) ''' //OLC", "x in BASE_20_BORDER_SET if x[0] == '2'} WEST_DIGITS = {x", "direction in directions_list if direction in uppercase_input_directions] neighbors = set()", "SEPARATOR + code[-2:] else: code = code[:-2] + SEPARATOR return", "return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address) for address in address_set} return tile_set", "self.code = modified_plus_code.upper() self.tile_size = tile_size def constructTileFromLatLong(self, lat: float,", "if biggerTile.contains(neighbor): return True return False def contains(self, potentialMember): '''/**", "False)) return math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns neighbor's", "memoized_digit_dict[f\"W{iterations_needed}\"] = west_set all_border_set = north_set | east_set | south_set", "the same size. * @param otherTile another tile of the", "a distance in tiles etc. * * Open Location Code", "IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(String)} or thrown if", "sizes don't match\") self_tile_x = self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1] other_tile_x", "on the globe, but can be up to approximately 275m.", "characters removed. In case of a GLOBAL tile, * returns", "by swaping digits ''' all_border_set = memoized_digit_dict.get(f\"A{iterations_needed}\") if not all_border_set:", "10, which corresponds to a valid * {@link com.google.openlocationcode.OpenLocationCode} after", "is neighboring this one. 
* @param potentialNeighbor the OpenGeoTile to", "OpenGeoTile to check * @return true if tile sizes and", "potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid iterating over neighbors for same tile'''", "relevant_digit if not west_exists: west_base += relevant_digit + \"2\" if", "value, we need to take care of wrapping - basically,", "other_tile_y: ''' one tile is above the other ''' if", "will have a larger TileSize. * @throws IllegalArgumentException if olc", "isinstance(eight_point_direction, str): directions = [] if eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper())", "iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()]) for i", "radians, 0 being an eastward direction, +/- PI being westward", "a new OpenGeoTile from lat/long coordinates. * @param latitude latitude", "Tile addresses will be 6 characters long.''' DISTRICT = (6,", "lat: float, long: float, tile_size=None): '''/** * Creates a new", "self.getTileAddress() def isNeighbor(self, potentialNeighbor): '''/** * Check if a tile", "set([self.getTileAddress()]) for i in range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set) tile_set =", "= {x for x in BASE_20_BORDER_SET if x[1] == '2'}", "TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT else: raise Exception(\"Too precise, sort this", "tile_size: tile_size = TileSize.PINPOINT self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size", "abs(firstDiff) > NUM_CHARACTERS_USED/2: if firstDiff > 0: firstDiff -= NUM_CHARACTERS_USED" ]
[ "typ ) if not handler: if hasattr( typ, '__mro__' ):", "not isinstance( types, (list,tuple)): types = [ types ] for", "hasattr( handler, 'registerEquivalent' ): handler.registerEquivalent( typ, base ) return handler", "try: from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT except ImportError as", "AttributeError as err: typ = type(value) handler = self.get( typ", "arrayToGLType( cls, value ): \"\"\"Given a data-value, guess the OpenGL", "classmethod( logs.logOnFail( unitSize, _log ) ) def zeros( cls, dims,", "'registerEquivalent' ): handler.registerEquivalent( typ, base ) return handler raise TypeError(", "handler ) def registerReturn( self, handler ): \"\"\"Register this handler", "= GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar ) GLshortArray = ADT(", "class ArrayDatatype( object ): \"\"\"Mix-in for array datatype classes The", "= classmethod( logs.logOnFail( voidDataPointer, _log ) ) def typedPointer( cls,", "GL_1_1.GL_UNSIGNED_SHORT : GLushortArray, GL_1_1.GL_UNSIGNED_INT : GLuintArray, #GL_1_1.GL_UNSIGNED_INT : GLenumArray, }", "or cls.typeConstant ) unitSize = classmethod( logs.logOnFail( unitSize, _log )", "ADT = None if acceleratesupport.ACCELERATE_AVAILABLE: try: from OpenGL_accelerate.arraydatatype import ArrayDatatype", ")): \"\"\"Array datatype for GLbyte types\"\"\" baseType = _types.GLbyte typeConstant", "from_param = classmethod( logs.logOnFail( from_param, _log ) ) def dataPointer(", "for type %s.%s (value: %s) registered\"\"\"%( typ.__module__, type.__name__, repr(value)[:50] )", "ArrayDatatype, ctypes.POINTER(_types.GLvoid )): \"\"\"Array datatype for GLenum types\"\"\" baseType =", "datatype for GLsizei types\"\"\" baseType = _types.GLsizei typeConstant = _types.GL_INT", "datatype for GLshort types\"\"\" baseType = _types.GLshort typeConstant = _types.GL_SHORT", "return array of the given dimensions filled with zeros\"\"\" return", "cls, value ): \"\"\"Given a data-value, try to determine number", "try: return plugin.load() 
except ImportError as err: return None else:", "for GLbyte types\"\"\" baseType = _types.GLbyte typeConstant = _types.GL_BYTE class", "# Python-coded version class HandlerRegistry( dict ): GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays']", "object\"\"\" if self.output_handler is None: if self.preferredOutput is not None:", ") GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum ) GLsizeiArray = ADT(", "= ADT( None, None ) GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd", "): \"\"\"Register this handler as the default return-type handler\"\"\" if", "in types: self[ type ] = handler if handler.isOutput: self.all_output_handlers.append(", "\"\"\"Given a data-value, guess the OpenGL type of the corresponding", "GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )): \"\"\"Array datatype for GLbyte types\"\"\" baseType", "class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )): \"\"\"Array datatype for GLenum types\"\"\"", "found'%(name,)) def get_output_handler( self ): \"\"\"Fast-path lookup for output handler", "_types.GLclampd typeConstant = _types.GL_DOUBLE class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )): \"\"\"Array", "base ) return handler raise TypeError( \"\"\"No array-type handler for", "pointer\"\"\" return cls.getHandler(value).from_param( value, cls.typeConstant ) from_param = classmethod( logs.logOnFail(", "logs.logOnFail( asArray, _log ) ) def arrayToGLType( cls, value ):", "dimensions (assumes full structure info)\"\"\" return cls.getHandler(value).dimensions( value ) dimensions", "cls.typeConstant ) from_param = classmethod( logs.logOnFail( from_param, _log ) )", "_types.GLsizei typeConstant = _types.GL_INT class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )): \"\"\"Array", "array datatype classes The ArrayDatatype marker essentially is used to", "baseType = _types.GLenum typeConstant = _types.GL_UNSIGNED_INT class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei", 
"dataPointer( cls, value ): \"\"\"Given a value in a known", "this class as handler for given set of types\"\"\" if", "_log.info( 'Using accelerated ArrayDatatype' ) ArrayDatatype = ADT( None, None", "_log ) ) def voidDataPointer( cls, value ): \"\"\"Given value", "cls.getHandler(value).asArray( value, typeCode or cls.typeConstant ) asArray = classmethod( logs.logOnFail(", "ArrayDatatype( object ): \"\"\"Mix-in for array datatype classes The ArrayDatatype", "types\"\"\" baseType = _types.GLbyte typeConstant = _types.GL_BYTE class GLcharArray( ArrayDatatype,", "self.output_handler = self.handler_by_plugin_name( preferred ) if self.output_handler: break if not", "): \"\"\"Register this class as handler for given set of", "accelerator from OpenGL_accelerate\" ) if ADT is None: # Python-coded", "def get_output_handler( self ): \"\"\"Fast-path lookup for output handler object\"\"\"", "ArrayDatatype, ctypes.POINTER(_types.GLint )): \"\"\"Array datatype for GLint types\"\"\" baseType =", "ADT( None, None ) GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd )", "= _types.GLclampd typeConstant = _types.GL_DOUBLE class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )):", ") try: return ctypes.c_void_p(pointer) except TypeError as err: return pointer", "ADT is None: # Python-coded version class HandlerRegistry( dict ):", "given dimensions filled with zeros\"\"\" return cls.returnHandler().zeros( dims, typeCode or", "types\"\"\" baseType = _types.GLfloat typeConstant = _types.GL_FLOAT class GLdoubleArray( ArrayDatatype,", "GL_1_1.GL_SHORT, _types.GLshort ) GLintArray = ADT( GL_1_1.GL_INT, _types.GLint ) GLubyteArray", "class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )): \"\"\"Array datatype for GLuint types\"\"\"", "HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY class ArrayDatatype( object ): \"\"\"Mix-in", "value ): \"\"\"Given value in a known data-pointer type, return", "given 
value\"\"\" try: typ = value.__class__ except AttributeError as err:", "get_output_handler( self ): \"\"\"Fast-path lookup for output handler object\"\"\" if", "cls, value, typeCode=None ): \"\"\"Determine unit size of an array", "types: self[ type ] = handler if handler.isOutput: self.all_output_handlers.append( handler", "GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY class ArrayDatatype( object", "ctypes pointer\"\"\" return cls.getHandler(value).from_param( value, cls.typeConstant ) from_param = classmethod(", "ctypes.POINTER(_types.GLuint )): \"\"\"Array datatype for GLuint types\"\"\" baseType = _types.GLuint", "handler: handler = handler.load() if handler: handler = handler() if", "data-type classes... class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )): \"\"\"Array datatype for", "GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble ) GLfloatArray = ADT( GL_1_1.GL_FLOAT,", "classmethod( logs.logOnFail( voidDataPointer, _log ) ) def typedPointer( cls, value", "\"\"\"Given a data-value, get the dimensions (assumes full structure info)\"\"\"", "GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )): \"\"\"Array datatype for GLclampf types\"\"\" baseType", "return handler raise TypeError( \"\"\"No array-type handler for type %s.%s", "GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )): \"\"\"Array datatype for GLuint types\"\"\" baseType", "_types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE = { GL_1_1.GL_DOUBLE : GLclampdArray, GL_1_1.GL_FLOAT :", "voidDataPointer = classmethod( logs.logOnFail( voidDataPointer, _log ) ) def typedPointer(", "= plugin_match self.output_handler = None self.preferredOutput = None self.all_output_handlers =", "plugins.FormatHandler.by_name( name ) if plugin: try: return plugin.load() except ImportError", "_types.GLint typeConstant = _types.GL_INT class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )): 
\"\"\"Array", "dict ): GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays'] def __init__( self, plugin_match ):", "dataPointer = classmethod( logs.logOnFail( dataPointer, _log ) ) def voidDataPointer(", "for output handler object\"\"\" if self.output_handler is None: if self.preferredOutput", "as err: _log.warn( \"\"\"Failure in dataPointer for %s instance %s\"\"\",", ") def typedPointer( cls, value ): \"\"\"Return a pointer-to-base-type pointer", "final array data-type classes... class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )): \"\"\"Array", "particular argument as having an \"array\" type, which means that", "return cls.getHandler(value).arrayToGLType( value ) arrayToGLType = classmethod( logs.logOnFail( arrayToGLType, _log", "arrays sub-package and its registered handlers. \"\"\" typeConstant = None", "GLOBAL_REGISTRY getHandler = GLOBAL_REGISTRY.__call__ returnHandler = GLOBAL_REGISTRY.get_output_handler isAccelerated = False", "= ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 ) GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum", "hasattr( typ, '__mro__' ): for base in typ.__mro__: handler =", "get the dimensions (assumes full structure info)\"\"\" return cls.getHandler(value).dimensions( value", "handler as the default return-type handler\"\"\" if isinstance( handler, (str,unicode)):", "classmethod( logs.logOnFail( dimensions, _log ) ) def arrayByteCount( cls, value", "= ADT( GL_1_1.GL_DOUBLE, _types.GLclampd ) GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf", "ArrayDatatype, ctypes.POINTER(_types.GLenum )): \"\"\"Array datatype for GLenum types\"\"\" baseType =", "(list,tuple)): types = [ types ] for type in types:", "ADT( _types.GL_VOID_P, _types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE = { GL_1_1.GL_DOUBLE : GLclampdArray,", "GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT : GLushortArray, GL_1_1.GL_UNSIGNED_INT : GLuintArray, #GL_1_1.GL_UNSIGNED_INT", "type(value) handler = self.get( typ ) if not handler: 
if", "in dataPointer for %s instance %s\"\"\", type(value), value, ) raise", "= handler if handler.isOutput: self.all_output_handlers.append( handler ) def registerReturn( self,", "ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort ) GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint )", "GL_1_1.GL_DOUBLE, _types.GLclampd ) GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf ) GLdoubleArray", "= _types.GLshort typeConstant = _types.GL_SHORT class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint )):", "self ): \"\"\"Fast-path lookup for output handler object\"\"\" if self.output_handler", "cls.dataPointer(value), ctypes.POINTER( cls.baseType )) typedPointer = classmethod( typedPointer ) def", "to determine number of bytes it's final form occupies For", "baseType = _types.GLvoidp typeConstant = _types.GL_VOID_P else: # Cython-coded array", "self.output_handler def register( self, handler, types=None ): \"\"\"Register this class", "zeros\"\"\" return cls.returnHandler().zeros( dims, typeCode or cls.typeConstant ) zeros =", "GLshortArray, GL_1_1.GL_INT : GLintArray, GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT : GLushortArray,", ")): \"\"\"Array datatype for GLuint types\"\"\" baseType = _types.GLuint64 typeConstant", "GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )): \"\"\"Array datatype for GLuint types\"\"\" baseType", "datatype for GLclampd types\"\"\" baseType = _types.GLclampd typeConstant = _types.GL_DOUBLE", "a known data-pointer type, convert to a ctypes pointer\"\"\" return", "a data-value, get the dimensions (assumes full structure info)\"\"\" return", "handlers. 
\"\"\" typeConstant = None handler = GLOBAL_REGISTRY getHandler =", "pointer-to-base-type pointer for given value\"\"\" return ctypes.cast( cls.dataPointer(value), ctypes.POINTER( cls.baseType", "the given dimensions filled with zeros\"\"\" return cls.returnHandler().zeros( dims, typeCode", "as err: return None else: raise RuntimeError( 'No handler of", "baseType = _types.GLsizei typeConstant = _types.GL_INT class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid", "import acceleratesupport ADT = None if acceleratesupport.ACCELERATE_AVAILABLE: try: from OpenGL_accelerate.arraydatatype", "for handling via the arrays sub-package and its registered handlers.", "= _types.GLint64 typeConstant = None # TODO: find out what", "): \"\"\"Given a data-value, try to determine number of bytes", "class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )): \"\"\"Array datatype for GLubyte types\"\"\"", "_types.GLshort ) GLintArray = ADT( GL_1_1.GL_INT, _types.GLint ) GLubyteArray =", "GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )): \"\"\"Array datatype for GLenum types\"\"\" baseType", "typ.__mro__: handler = self.get( base ) if not handler: handler", "= ADT( GL_1_1.GL_FLOAT, _types.GLclampf ) GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble", ")): \"\"\"Array datatype for GLfloat types\"\"\" baseType = _types.GLfloat typeConstant", "GL_1_1.GL_FLOAT, _types.GLclampf ) GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble ) GLfloatArray", "types\"\"\" if not isinstance( types, (list,tuple)): types = [ types", "GL_1_1.GL_SHORT : GLshortArray, GL_1_1.GL_INT : GLintArray, GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT", "_types.GLshort typeConstant = _types.GL_SHORT class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint )): \"\"\"Array", "ctypes.POINTER(_types.GLshort )): \"\"\"Array datatype for GLshort types\"\"\" baseType = _types.GLshort", "GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort ) GLintArray = ADT( 
GL_1_1.GL_INT,", "a particular argument as having an \"array\" type, which means", "None: if self.preferredOutput is not None: self.output_handler = self.handler_by_plugin_name( self.preferredOutput", "self.all_output_handlers = [] def __call__( self, value ): \"\"\"Lookup of", "handler object\"\"\" if self.output_handler is None: if self.preferredOutput is not", "ArrayDatatype, ctypes.POINTER(_types.GLushort )): \"\"\"Array datatype for GLushort types\"\"\" baseType =", "pointer\"\"\" pointer = cls.dataPointer( value ) try: return ctypes.c_void_p(pointer) except", ") def asArray( cls, value, typeCode=None ): \"\"\"Given a value,", "data-value, calculate dimensions for the array (number-of-units)\"\"\" return cls.getHandler(value).arraySize( value,", "GLuint types\"\"\" baseType = _types.GLuint64 typeConstant = _types.GL_UNSIGNED_INT64 class GLenumArray(", "class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )): \"\"\"Array datatype for GLclampf types\"\"\"", "a data-value, calculate dimensions for the array (number-of-units)\"\"\" return cls.getHandler(value).arraySize(", "break if not self.output_handler: raise RuntimeError( \"\"\"Unable to find any", "type ] = handler if handler.isOutput: self.all_output_handlers.append( handler ) def", "_types.GLushort typeConstant = _types.GL_UNSIGNED_SHORT class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )): \"\"\"Array", "typeConstant = _types.GL_UNSIGNED_SHORT class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )): \"\"\"Array datatype", "err: return pointer voidDataPointer = classmethod( logs.logOnFail( voidDataPointer, _log )", "it is eligible for handling via the arrays sub-package and", "ArrayDatatype, ctypes.POINTER(_types.GLuint64 )): \"\"\"Array datatype for GLuint types\"\"\" baseType =", "The ArrayDatatype marker essentially is used to mark a particular", ") GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE = {", "GL array types\"\"\" import 
ctypes import OpenGL from OpenGL.raw.GL import", ") arrayByteCount = classmethod( logs.logOnFail( arrayByteCount, _log ) ) #", "= HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY class ArrayDatatype( object ):", "= classmethod( logs.logOnFail( zeros, _log ) ) def dimensions( cls,", "preferred in self.GENERIC_OUTPUT_PREFERENCES: self.output_handler = self.handler_by_plugin_name( preferred ) if self.output_handler:", "= ADT( GL_1_1.GL_BYTE, _types.GLbyte ) GLcharArray = GLcharARBArray = ADT(", "ctypes/numpy ones!)\"\"\" ) return self.output_handler def register( self, handler, types=None", "\"\"\"Array datatype for GLubyte types\"\"\" baseType = _types.GLubyte typeConstant =", "PyOpenGL and may be removed eventually. \"\"\" return cls.getHandler(value).arrayToGLType( value", "all (not even ctypes/numpy ones!)\"\"\" ) return self.output_handler def register(", "_types.GL_FLOAT class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )): \"\"\"Array datatype for GLfloat", "= self.get( base ) if not handler: handler = self.match(", "typ.__module__, type.__name__, repr(value)[:50] ) ) return handler def handler_by_plugin_name( self,", "value, cls.typeConstant ) from_param = classmethod( logs.logOnFail( from_param, _log )", "for GL array types\"\"\" import ctypes import OpenGL from OpenGL.raw.GL", "handler.registerEquivalent( typ, base ) return handler raise TypeError( \"\"\"No array-type", "currently used in PyOpenGL and may be removed eventually. 
\"\"\"", "_types.GLint64 typeConstant = None # TODO: find out what this", "as the default return-type handler\"\"\" if isinstance( handler, (str,unicode)): self.preferredOutput", ") ) def unitSize( cls, value, typeCode=None ): \"\"\"Determine unit", "class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint )): \"\"\"Array datatype for GLint types\"\"\"", ") def arrayToGLType( cls, value ): \"\"\"Given a data-value, guess", "return cls.getHandler(value).arrayByteCount( value ) arrayByteCount = classmethod( logs.logOnFail( arrayByteCount, _log", "_arrayconstants as GL_1_1 from OpenGL import logs _log = logs.getLog(", "if self.output_handler: break if not self.output_handler: raise RuntimeError( \"\"\"Unable to", "a ctypes pointer\"\"\" return cls.getHandler(value).from_param( value, cls.typeConstant ) from_param =", "registry\"\"\" return cls.handler def from_param( cls, value, typeConstant=None ): \"\"\"Given", "if handler: handler = handler.load() if handler: handler = handler()", "GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE = { GL_1_1.GL_DOUBLE", "_types.GL_INT class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )): \"\"\"Array datatype for GLubyte", "value, typeCode or cls.typeConstant ) unitSize = classmethod( logs.logOnFail( unitSize,", "_types.GL_VOID_P else: # Cython-coded array handler _log.info( 'Using accelerated ArrayDatatype'", "array types\"\"\" import ctypes import OpenGL from OpenGL.raw.GL import _types", "GLuint types\"\"\" baseType = _types.GLuint typeConstant = _types.GL_UNSIGNED_INT class GLint64Array(", "or cls.typeConstant ) zeros = classmethod( logs.logOnFail( zeros, _log )", "used to mark a particular argument as having an \"array\"", "= None if acceleratesupport.ACCELERATE_AVAILABLE: try: from OpenGL_accelerate.arraydatatype import ArrayDatatype as", "\"\"\"Array datatype for GLclampd types\"\"\" baseType = _types.GLclampd typeConstant =", "GLdoubleArray( ArrayDatatype, 
ctypes.POINTER(_types.GLdouble )): \"\"\"Array datatype for GLdouble types\"\"\" baseType", "typeConstant=None ): \"\"\"Given a value in a known data-pointer type,", "value, typeConstant=None ): \"\"\"Given a value in a known data-pointer", "\"\"\"Array datatype for GLint types\"\"\" baseType = _types.GLint typeConstant =", "_types.GLfloat typeConstant = _types.GL_FLOAT class GLdoubleArray( ArrayDatatype, ctypes.POINTER(_types.GLdouble )): \"\"\"Array", "\"\"\"Given a value in a known data-pointer type, convert to", "GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays'] def __init__( self, plugin_match ): self.match =", "self.GENERIC_OUTPUT_PREFERENCES: self.output_handler = self.handler_by_plugin_name( preferred ) if self.output_handler: break if", "essentially is used to mark a particular argument as having", "self[ type ] = handler if handler.isOutput: self.all_output_handlers.append( handler )", ")): \"\"\"Array datatype for GLuint types\"\"\" baseType = _types.GLint64 typeConstant", ": GLbyteArray, GL_1_1.GL_SHORT : GLshortArray, GL_1_1.GL_INT : GLintArray, GL_1_1.GL_UNSIGNED_BYTE :", "return cls.returnHandler().zeros( dims, typeCode or cls.typeConstant ) zeros = classmethod(", "ctypes.POINTER(_types.GLint )): \"\"\"Array datatype for GLint types\"\"\" baseType = _types.GLint", ") if handler: handler = handler.load() if handler: handler =", "extension pointers-to-arrays\"\"\" baseType = _types.GLchar typeConstant = _types.GL_BYTE GLcharARBArray =", "GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort )): \"\"\"Array datatype for GLushort types\"\"\" baseType", "return cls.getHandler(value).from_param( value, cls.typeConstant ) from_param = classmethod( logs.logOnFail( from_param,", "GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei ) GLvoidpArray = ADT( _types.GL_VOID_P,", "_types.GLchar ) GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort ) GLintArray =", "data-value, guess the OpenGL type of the corresponding pointer Note:", "types\"\"\" baseType = 
_types.GLshort typeConstant = _types.GL_SHORT class GLintArray( ArrayDatatype,", "dataPointer, _log ) ) def voidDataPointer( cls, value ): \"\"\"Given", "= [] def __call__( self, value ): \"\"\"Lookup of handler", "= _types.GL_UNSIGNED_INT class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )): \"\"\"Array datatype for", "= handler() if handler: self[ typ ] = handler if", "known data-pointer type, return void_p for pointer\"\"\" pointer = cls.dataPointer(", "the final array data-type classes... class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )):", "GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )): \"\"\"Array datatype for GLsizei types\"\"\" baseType", "types\"\"\" baseType = _types.GLvoidp typeConstant = _types.GL_VOID_P else: # Cython-coded", ")): \"\"\"Array datatype for GLdouble types\"\"\" baseType = _types.GLdouble typeConstant", "'__mro__' ): for base in typ.__mro__: handler = self.get( base", "_log ) ) def unitSize( cls, value, typeCode=None ): \"\"\"Determine", "GL_1_1.GL_BYTE, _types.GLbyte ) GLcharArray = GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar", "data-pointer type, return void_p for pointer\"\"\" pointer = cls.dataPointer( value", "isinstance( handler, (str,unicode)): self.preferredOutput = handler self.output_handler = None else:", "from OpenGL import logs _log = logs.getLog( 'OpenGL.arrays.arraydatatype' ) from", "OpenGL.raw.GL import _types from OpenGL import plugins from OpenGL.arrays import", "occupies For most data-types this is arraySize() * atomic-unit-size \"\"\"", "def register( self, handler, types=None ): \"\"\"Register this class as", "classmethod( logs.logOnFail( arrayToGLType, _log ) ) def arraySize( cls, value,", "value ) arrayToGLType = classmethod( logs.logOnFail( arrayToGLType, _log ) )", "except TypeError as err: return pointer voidDataPointer = classmethod( logs.logOnFail(", "): \"\"\"Given a data-value, get the dimensions (assumes full structure", "return ctypes.cast( 
cls.dataPointer(value), ctypes.POINTER( cls.baseType )) typedPointer = classmethod( typedPointer", "for pointer\"\"\" pointer = cls.dataPointer( value ) try: return ctypes.c_void_p(pointer)", "= _types.GL_FLOAT class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )): \"\"\"Array datatype for", "GL_1_1.GL_FLOAT : GLclampfArray, GL_1_1.GL_FLOAT : GLfloatArray, GL_1_1.GL_DOUBLE : GLdoubleArray, GL_1_1.GL_BYTE", "= _types.GL_DOUBLE class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )): \"\"\"Array datatype for", "for %s instance %s\"\"\", type(value), value, ) raise dataPointer =", "a return array of the given dimensions filled with zeros\"\"\"", "\"\"\"Given a value, convert to preferred array representation\"\"\" return cls.getHandler(value).asArray(", "= _types.GL_VOID_P else: # Cython-coded array handler _log.info( 'Using accelerated", "\"\"\"Given a value in a known data-pointer type, return long", "_types.GL_UNSIGNED_INT class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )): \"\"\"Array datatype for GLsizei", "ArrayDatatype, ctypes.POINTER(_types.GLint64 )): \"\"\"Array datatype for GLuint types\"\"\" baseType =", "GLint types\"\"\" baseType = _types.GLint typeConstant = _types.GL_INT class GLubyteArray(", "datatype for GLuint types\"\"\" baseType = _types.GLuint64 typeConstant = _types.GL_UNSIGNED_INT64", ") GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort ) GLintArray = ADT(", "for GLenum types\"\"\" baseType = _types.GLvoidp typeConstant = _types.GL_VOID_P else:", "GLclampfArray, GL_1_1.GL_FLOAT : GLfloatArray, GL_1_1.GL_DOUBLE : GLdoubleArray, GL_1_1.GL_BYTE : GLbyteArray,", "return cls.getHandler(value).dimensions( value ) dimensions = classmethod( logs.logOnFail( dimensions, _log", "of handler for given value\"\"\" try: typ = value.__class__ except", "RuntimeError( 'No handler of name %s found'%(name,)) def get_output_handler( self", "value ) dimensions = classmethod( logs.logOnFail( dimensions, _log ) )", "= 
ADT( None, _types.GLint64 ) GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64", "= self.get( typ ) if not handler: if hasattr( typ,", "): \"\"\"Given a value in a known data-pointer type, return", "if handler: self[ typ ] = handler if hasattr( handler,", "\"\"\"Return a pointer-to-base-type pointer for given value\"\"\" return ctypes.cast( cls.dataPointer(value),", "= ADT( GL_1_1.GL_DOUBLE, _types.GLdouble ) GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat", "it's final form occupies For most data-types this is arraySize()", "in PyOpenGL and may be removed eventually. \"\"\" return cls.getHandler(value).arrayToGLType(", "= GLOBAL_REGISTRY.__call__ returnHandler = GLOBAL_REGISTRY.get_output_handler isAccelerated = False @classmethod def", "this is not currently used in PyOpenGL and may be", "): \"\"\"Given a value in a known data-pointer type, convert", "= GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte ) GLushortArray = ADT(", "logs.logOnFail( voidDataPointer, _log ) ) def typedPointer( cls, value ):", "_types.GLdouble typeConstant = _types.GL_DOUBLE class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )): \"\"\"Array", "= ADT( _types.GL_VOID_P, _types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE = { GL_1_1.GL_DOUBLE :", "its registered handlers. 
\"\"\" typeConstant = None handler = GLOBAL_REGISTRY", "typedPointer ) def asArray( cls, value, typeCode=None ): \"\"\"Given a", "unitSize( cls, value, typeCode=None ): \"\"\"Determine unit size of an", "structure info)\"\"\" return cls.getHandler(value).dimensions( value ) dimensions = classmethod( logs.logOnFail(", "typeConstant = _types.GL_FLOAT class GLdoubleArray( ArrayDatatype, ctypes.POINTER(_types.GLdouble )): \"\"\"Array datatype", "_log.warn( \"Unable to load ArrayDatatype accelerator from OpenGL_accelerate\" ) if", "err: typ = type(value) handler = self.get( typ ) if", "logs.logOnFail( dataPointer, _log ) ) def voidDataPointer( cls, value ):", "baseType = _types.GLfloat typeConstant = _types.GL_FLOAT class GLdoubleArray( ArrayDatatype, ctypes.POINTER(_types.GLdouble", "name ) if plugin: try: return plugin.load() except ImportError as", "our local type if defined, otherwise asks the handler to", "types\"\"\" baseType = _types.GLclampf typeConstant = _types.GL_FLOAT class GLfloatArray( ArrayDatatype,", "logs.logOnFail( arrayByteCount, _log ) ) # the final array data-type", "types\"\"\" baseType = _types.GLdouble typeConstant = _types.GL_DOUBLE class GLbyteArray( ArrayDatatype,", "def unitSize( cls, value, typeCode=None ): \"\"\"Determine unit size of", "GLcharArray class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )): \"\"\"Array datatype for GLshort", ") ) def voidDataPointer( cls, value ): \"\"\"Given value in", "err: return None else: raise RuntimeError( 'No handler of name", "logs.logOnFail( unitSize, _log ) ) def zeros( cls, dims, typeCode=None", "try: return ctypes.c_void_p(pointer) except TypeError as err: return pointer voidDataPointer", "handler = self.match( base ) if handler: handler = handler.load()", "\"\"\"Array datatype for GLfloat types\"\"\" baseType = _types.GLfloat typeConstant =", "= GLubyteArray class GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort )): \"\"\"Array datatype for", "even ctypes/numpy 
ones!)\"\"\" ) return self.output_handler def register( self, handler,", "type %s.%s (value: %s) registered\"\"\"%( typ.__module__, type.__name__, repr(value)[:50] ) )", "= classmethod( logs.logOnFail( arraySize, _log ) ) def unitSize( cls,", "to mark a particular argument as having an \"array\" type,", "if handler.isOutput: self.all_output_handlers.append( handler ) def registerReturn( self, handler ):", "_types.GLushort ) GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint ) GLint64Array =", "plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY class ArrayDatatype( object ): \"\"\"Mix-in for", "getHandler = GLOBAL_REGISTRY.__call__ returnHandler = GLOBAL_REGISTRY.get_output_handler isAccelerated = False @classmethod", "self, name ): plugin = plugins.FormatHandler.by_name( name ) if plugin:", "* atomic-unit-size \"\"\" return cls.getHandler(value).arrayByteCount( value ) arrayByteCount = classmethod(", "dataPointer for %s instance %s\"\"\", type(value), value, ) raise dataPointer", "typeCode or cls.typeConstant ) zeros = classmethod( logs.logOnFail( zeros, _log", "GLubyteArray = GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte ) GLushortArray =", "GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )): \"\"\"Array datatype for GLfloat types\"\"\" baseType", "datatype for GLubyte types\"\"\" baseType = _types.GLubyte typeConstant = _types.GL_UNSIGNED_BYTE", "ctypes.POINTER(_types.GLubyte )): \"\"\"Array datatype for GLubyte types\"\"\" baseType = _types.GLubyte", ") arraySize = classmethod( logs.logOnFail( arraySize, _log ) ) def", ") def registerReturn( self, handler ): \"\"\"Register this handler as", "typeConstant = _types.GL_INT class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )): \"\"\"Array datatype", "type, convert to a ctypes pointer\"\"\" return cls.getHandler(value).from_param( value, cls.typeConstant", "= _types.GL_FLOAT class GLdoubleArray( ArrayDatatype, 
ctypes.POINTER(_types.GLdouble )): \"\"\"Array datatype for", "for GLushort types\"\"\" baseType = _types.GLushort typeConstant = _types.GL_UNSIGNED_SHORT class", "arraySize = classmethod( logs.logOnFail( arraySize, _log ) ) def unitSize(", "ctypes.c_void_p(pointer) except TypeError as err: return pointer voidDataPointer = classmethod(", "otherwise asks the handler to guess... \"\"\" return cls.getHandler(value).unitSize( value,", "= ADT( GL_1_1.GL_BYTE, _types.GLchar ) GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort", "None # TODO: find out what this should be! class", ": GLdoubleArray, GL_1_1.GL_BYTE : GLbyteArray, GL_1_1.GL_SHORT : GLshortArray, GL_1_1.GL_INT :", "classmethod( logs.logOnFail( arrayByteCount, _log ) ) # the final array", "datatype for ARB extension pointers-to-arrays\"\"\" baseType = _types.GLchar typeConstant =", ") GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei ) GLvoidpArray = ADT(", "self, handler, types=None ): \"\"\"Register this class as handler for", "import _types from OpenGL import plugins from OpenGL.arrays import formathandler,", "raise RuntimeError( 'No handler of name %s found'%(name,)) def get_output_handler(", "= logs.getLog( 'OpenGL.arrays.arraydatatype' ) from OpenGL import acceleratesupport ADT =", "for GLuint types\"\"\" baseType = _types.GLuint typeConstant = _types.GL_UNSIGNED_INT class", "ArrayDatatype, ctypes.c_char_p): \"\"\"Array datatype for ARB extension pointers-to-arrays\"\"\" baseType =", "value ) except Exception as err: _log.warn( \"\"\"Failure in dataPointer", "typeConstant = None handler = GLOBAL_REGISTRY getHandler = GLOBAL_REGISTRY.__call__ returnHandler", "_types.GLclampf typeConstant = _types.GL_FLOAT class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )): \"\"\"Array", "ADT( GL_1_1.GL_BYTE, _types.GLchar ) GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort )", "a known data-pointer type, return void_p for pointer\"\"\" pointer =", "register( self, handler, types=None ): \"\"\"Register 
this class as handler", "def __init__( self, plugin_match ): self.match = plugin_match self.output_handler =", "GL_1_1.GL_DOUBLE : GLclampdArray, GL_1_1.GL_FLOAT : GLclampfArray, GL_1_1.GL_FLOAT : GLfloatArray, GL_1_1.GL_DOUBLE", "ImportError as err: _log.warn( \"Unable to load ArrayDatatype accelerator from", "ctypes.POINTER(_types.GLenum )): \"\"\"Array datatype for GLenum types\"\"\" baseType = _types.GLenum", "_types.GL_BYTE GLcharARBArray = GLcharArray class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )): \"\"\"Array", ")): \"\"\"Array datatype for GLint types\"\"\" baseType = _types.GLint typeConstant", "not handler: handler = self.match( base ) if handler: handler", "asArray( cls, value, typeCode=None ): \"\"\"Given a value, convert to", "a value in a known data-pointer type, convert to a", "): for base in typ.__mro__: handler = self.get( base )", "pointer Note: this is not currently used in PyOpenGL and", "cls, value, typeCode=None ): \"\"\"Given a value, convert to preferred", "for GLenum types\"\"\" baseType = _types.GLenum typeConstant = _types.GL_UNSIGNED_INT class", "): self.match = plugin_match self.output_handler = None self.preferredOutput = None", "representation\"\"\" return cls.getHandler(value).asArray( value, typeCode or cls.typeConstant ) asArray =", "datatype for GLushort types\"\"\" baseType = _types.GLushort typeConstant = _types.GL_UNSIGNED_SHORT", "form occupies For most data-types this is arraySize() * atomic-unit-size", "= _types.GLuint64 typeConstant = _types.GL_UNSIGNED_INT64 class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )):", "\"\"\"Array datatype for GLuint types\"\"\" baseType = _types.GLuint typeConstant =", "baseType = _types.GLchar typeConstant = _types.GL_BYTE GLcharARBArray = GLcharArray class", "ArrayDatatype, ctypes.POINTER(_types.GLfloat )): \"\"\"Array datatype for GLfloat types\"\"\" baseType =", "handler: handler = self.match( base ) if handler: handler =", "pointers-to-arrays\"\"\" 
baseType = _types.GLchar typeConstant = _types.GL_BYTE GLcharARBArray = GLcharArray", "\"\"\"Failure in dataPointer for %s instance %s\"\"\", type(value), value, )", "zeros, _log ) ) def dimensions( cls, value ): \"\"\"Given", "_types.GL_UNSIGNED_INT class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )): \"\"\"Array datatype for GLuint", "= self.handler_by_plugin_name( preferred ) if self.output_handler: break if not self.output_handler:", "be! class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )): \"\"\"Array datatype for GLuint", "def registerReturn( self, handler ): \"\"\"Register this handler as the", ") asArray = classmethod( logs.logOnFail( asArray, _log ) ) def", "= None # TODO: find out what this should be!", "= _types.GLdouble typeConstant = _types.GL_DOUBLE class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )):", "ArrayDatatype, ctypes.POINTER(_types.GLclampd )): \"\"\"Array datatype for GLclampd types\"\"\" baseType =", "= None self.preferredOutput = None self.all_output_handlers = [] def __call__(", "implementations (abstraction points for GL array types\"\"\" import ctypes import", "return self.output_handler def register( self, handler, types=None ): \"\"\"Register this", "or cls.typeConstant ) arraySize = classmethod( logs.logOnFail( arraySize, _log )", "cls.getHandler(value).unitSize( value, typeCode or cls.typeConstant ) unitSize = classmethod( logs.logOnFail(", "from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT except ImportError as err:", "that it is eligible for handling via the arrays sub-package", "logs.logOnFail( from_param, _log ) ) def dataPointer( cls, value ):", "\"\"\"Array datatype for GLsizei types\"\"\" baseType = _types.GLsizei typeConstant =", "class as handler for given set of types\"\"\" if not", "value, typeCode=None ): \"\"\"Given a value, convert to preferred array", "= _types.GL_UNSIGNED_BYTE GLbooleanArray = GLubyteArray class GLushortArray( ArrayDatatype, 
ctypes.POINTER(_types.GLushort )):", "baseType = _types.GLushort typeConstant = _types.GL_UNSIGNED_SHORT class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint", "GLbyteArray = ADT( GL_1_1.GL_BYTE, _types.GLbyte ) GLcharArray = GLcharARBArray =", "handler = handler.load() if handler: handler = handler() if handler:", "if hasattr( typ, '__mro__' ): for base in typ.__mro__: handler", "\"\"\"Array datatype for GLdouble types\"\"\" baseType = _types.GLdouble typeConstant =", "self.output_handler: for preferred in self.GENERIC_OUTPUT_PREFERENCES: self.output_handler = self.handler_by_plugin_name( preferred )", "types\"\"\" baseType = _types.GLsizei typeConstant = _types.GL_INT class GLvoidpArray( ArrayDatatype,", "logs.logOnFail( zeros, _log ) ) def dimensions( cls, value ):", "@classmethod def getRegistry( cls ): \"\"\"Get our handler registry\"\"\" return", "of an array (if possible) Uses our local type if", "of name %s found'%(name,)) def get_output_handler( self ): \"\"\"Fast-path lookup", "GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint ) GLint64Array = ADT( None,", "class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )): \"\"\"Array datatype for GLsizei types\"\"\"", "Python-coded version class HandlerRegistry( dict ): GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays'] def", "_types.GLubyte ) GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort ) GLuintArray =", "def arraySize( cls, value, typeCode = None ): \"\"\"Given a", "handler\"\"\" if isinstance( handler, (str,unicode)): self.preferredOutput = handler self.output_handler =", "pointer\"\"\" try: return cls.getHandler(value).dataPointer( value ) except Exception as err:", "{ GL_1_1.GL_DOUBLE : GLclampdArray, GL_1_1.GL_FLOAT : GLclampfArray, GL_1_1.GL_FLOAT : GLfloatArray,", "\"\"\"Array datatype for GLenum types\"\"\" baseType = _types.GLvoidp typeConstant =", "cls, dims, typeCode=None ): \"\"\"Allocate a return array of the", ")): \"\"\"Array datatype for GLuint 
types\"\"\" baseType = _types.GLuint typeConstant", "if plugin: try: return plugin.load() except ImportError as err: return", "as handler for given set of types\"\"\" if not isinstance(", "OpenGL import logs _log = logs.getLog( 'OpenGL.arrays.arraydatatype' ) from OpenGL", "known data-pointer type, convert to a ctypes pointer\"\"\" return cls.getHandler(value).from_param(", "def typedPointer( cls, value ): \"\"\"Return a pointer-to-base-type pointer for", "GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT : GLushortArray, GL_1_1.GL_UNSIGNED_INT : GLuintArray, #GL_1_1.GL_UNSIGNED_INT : GLenumArray,", "logs _log = logs.getLog( 'OpenGL.arrays.arraydatatype' ) from OpenGL import acceleratesupport", "from_param, _log ) ) def dataPointer( cls, value ): \"\"\"Given", "long for pointer\"\"\" try: return cls.getHandler(value).dataPointer( value ) except Exception", "ArrayDatatype, ctypes.POINTER(_types.GLclampf )): \"\"\"Array datatype for GLclampf types\"\"\" baseType =", "load ArrayDatatype accelerator from OpenGL_accelerate\" ) if ADT is None:", "cls, value ): \"\"\"Given a data-value, get the dimensions (assumes", "# TODO: find out what this should be! 
class GLuint64Array(", "(value: %s) registered\"\"\"%( typ.__module__, type.__name__, repr(value)[:50] ) ) return handler", "most data-types this is arraySize() * atomic-unit-size \"\"\" return cls.getHandler(value).arrayByteCount(", "ctypes.cast( cls.dataPointer(value), ctypes.POINTER( cls.baseType )) typedPointer = classmethod( typedPointer )", "GLintArray = ADT( GL_1_1.GL_INT, _types.GLint ) GLubyteArray = GLbooleanArray =", "= classmethod( logs.logOnFail( arrayToGLType, _log ) ) def arraySize( cls,", "return cls.getHandler(value).arraySize( value, typeCode or cls.typeConstant ) arraySize = classmethod(", "the dimensions (assumes full structure info)\"\"\" return cls.getHandler(value).dimensions( value )", "typeConstant = _types.GL_VOID_P else: # Cython-coded array handler _log.info( 'Using", ") GLintArray = ADT( GL_1_1.GL_INT, _types.GLint ) GLubyteArray = GLbooleanArray", "err: _log.warn( \"Unable to load ArrayDatatype accelerator from OpenGL_accelerate\" )", "value ): \"\"\"Lookup of handler for given value\"\"\" try: typ", "from OpenGL import plugins from OpenGL.arrays import formathandler, _arrayconstants as", "if not handler: handler = self.match( base ) if handler:", "None: # Python-coded version class HandlerRegistry( dict ): GENERIC_OUTPUT_PREFERENCES =", "baseType = _types.GLdouble typeConstant = _types.GL_DOUBLE class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte", "%s\"\"\", type(value), value, ) raise dataPointer = classmethod( logs.logOnFail( dataPointer,", "what this should be! 
class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )): \"\"\"Array", ") def arraySize( cls, value, typeCode = None ): \"\"\"Given", "class HandlerRegistry( dict ): GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays'] def __init__( self,", "GL_1_1.GL_DOUBLE, _types.GLdouble ) GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat ) GLbyteArray", "ADT( None, _types.GLint64 ) GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 )", "GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte ) GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort ) GLuintArray", "HandlerRegistry( dict ): GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays'] def __init__( self, plugin_match", "atomic-unit-size \"\"\" return cls.getHandler(value).arrayByteCount( value ) arrayByteCount = classmethod( logs.logOnFail(", "typ = type(value) handler = self.get( typ ) if not", "baseType = _types.GLuint typeConstant = _types.GL_UNSIGNED_INT class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64", "baseType = _types.GLuint64 typeConstant = _types.GL_UNSIGNED_INT64 class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum", "GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf ) GLdoubleArray = ADT( GL_1_1.GL_DOUBLE,", "data-pointer type, return long for pointer\"\"\" try: return cls.getHandler(value).dataPointer( value", "classes... 
class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )): \"\"\"Array datatype for GLclampd", "handler, (str,unicode)): self.preferredOutput = handler self.output_handler = None else: self.preferredOutput", "this handler as the default return-type handler\"\"\" if isinstance( handler,", "lookup for output handler object\"\"\" if self.output_handler is None: if", "from OpenGL.raw.GL import _types from OpenGL import plugins from OpenGL.arrays", "asArray = classmethod( logs.logOnFail( asArray, _log ) ) def arrayToGLType(", "GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 ) GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum ) GLsizeiArray", "eligible for handling via the arrays sub-package and its registered", "a data-value, guess the OpenGL type of the corresponding pointer", "try to determine number of bytes it's final form occupies", "): handler.registerEquivalent( typ, base ) return handler raise TypeError( \"\"\"No", "baseType = _types.GLbyte typeConstant = _types.GL_BYTE class GLcharArray( ArrayDatatype, ctypes.c_char_p):", "_log ) ) def dimensions( cls, value ): \"\"\"Given a", "OpenGL import acceleratesupport ADT = None if acceleratesupport.ACCELERATE_AVAILABLE: try: from", "self.preferredOutput = handler self.output_handler = None else: self.preferredOutput = None", ") if self.output_handler: break if not self.output_handler: raise RuntimeError( \"\"\"Unable", "for the array (number-of-units)\"\"\" return cls.getHandler(value).arraySize( value, typeCode or cls.typeConstant", "= _types.GLclampf typeConstant = _types.GL_FLOAT class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )):", "eventually. 
\"\"\" return cls.getHandler(value).arrayToGLType( value ) arrayToGLType = classmethod( logs.logOnFail(", "def arrayToGLType( cls, value ): \"\"\"Given a data-value, guess the", "): \"\"\"Given a value, convert to preferred array representation\"\"\" return", "for GLclampf types\"\"\" baseType = _types.GLclampf typeConstant = _types.GL_FLOAT class", "_types.GLuint typeConstant = _types.GL_UNSIGNED_INT class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )): \"\"\"Array", "class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )): \"\"\"Array datatype for GLshort types\"\"\"", "GLubyteArray class GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort )): \"\"\"Array datatype for GLushort", "for GLint types\"\"\" baseType = _types.GLint typeConstant = _types.GL_INT class", "# the final array data-type classes... class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd", "is eligible for handling via the arrays sub-package and its", "type in types: self[ type ] = handler if handler.isOutput:", "None if acceleratesupport.ACCELERATE_AVAILABLE: try: from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT", "self.preferredOutput is not None: self.output_handler = self.handler_by_plugin_name( self.preferredOutput ) if", "GLenum types\"\"\" baseType = _types.GLvoidp typeConstant = _types.GL_VOID_P else: #", "cls.getHandler(value).arraySize( value, typeCode or cls.typeConstant ) arraySize = classmethod( logs.logOnFail(", "_types.GL_UNSIGNED_SHORT class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )): \"\"\"Array datatype for GLuint", "ArrayDatatype' ) ArrayDatatype = ADT( None, None ) GLclampdArray =", "'OpenGL.arrays.arraydatatype' ) from OpenGL import acceleratesupport ADT = None if", "which means that it is eligible for handling via the", "self, value ): \"\"\"Lookup of handler for given value\"\"\" try:", "= classmethod( logs.logOnFail( dataPointer, _log ) ) def voidDataPointer( cls,", "TypeError( \"\"\"No 
array-type handler for type %s.%s (value: %s) registered\"\"\"%(", "handler of name %s found'%(name,)) def get_output_handler( self ): \"\"\"Fast-path", "= ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum ) GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei", "classes The ArrayDatatype marker essentially is used to mark a", "ctypes.POINTER( cls.baseType )) typedPointer = classmethod( typedPointer ) def asArray(", "plugin_match ): self.match = plugin_match self.output_handler = None self.preferredOutput =", "GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd ) GLclampfArray = ADT( GL_1_1.GL_FLOAT,", "None self.output_handler = handler GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY =", "for GLclampd types\"\"\" baseType = _types.GLclampd typeConstant = _types.GL_DOUBLE class", ") unitSize = classmethod( logs.logOnFail( unitSize, _log ) ) def", ") return self.output_handler def register( self, handler, types=None ): \"\"\"Register", "except AttributeError as err: typ = type(value) handler = self.get(", "class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )): \"\"\"Array datatype for GLbyte types\"\"\"", "to find any output handler at all (not even ctypes/numpy", "typeCode or cls.typeConstant ) unitSize = classmethod( logs.logOnFail( unitSize, _log", "typeCode = None ): \"\"\"Given a data-value, calculate dimensions for", "ctypes.POINTER(_types.GLclampd )): \"\"\"Array datatype for GLclampd types\"\"\" baseType = _types.GLclampd", "datatype classes The ArrayDatatype marker essentially is used to mark", "'No handler of name %s found'%(name,)) def get_output_handler( self ):", "%s.%s (value: %s) registered\"\"\"%( typ.__module__, type.__name__, repr(value)[:50] ) ) return", "type, return void_p for pointer\"\"\" pointer = cls.dataPointer( value )", "GL_1_1.GL_FLOAT : GLfloatArray, GL_1_1.GL_DOUBLE : GLdoubleArray, GL_1_1.GL_BYTE : GLbyteArray, GL_1_1.GL_SHORT", "): \"\"\"Return a pointer-to-base-type 
pointer for given value\"\"\" return ctypes.cast(", "type(value), value, ) raise dataPointer = classmethod( logs.logOnFail( dataPointer, _log", "acceleratesupport.ACCELERATE_AVAILABLE: try: from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT except ImportError", "handler_by_plugin_name( self, name ): plugin = plugins.FormatHandler.by_name( name ) if", "_types from OpenGL import plugins from OpenGL.arrays import formathandler, _arrayconstants", "ones!)\"\"\" ) return self.output_handler def register( self, handler, types=None ):", "class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )): \"\"\"Array datatype for GLuint types\"\"\"", "= _types.GL_UNSIGNED_SHORT class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )): \"\"\"Array datatype for", "array-type handler for type %s.%s (value: %s) registered\"\"\"%( typ.__module__, type.__name__,", "logs.logOnFail( arraySize, _log ) ) def unitSize( cls, value, typeCode=None", "cls.returnHandler().zeros( dims, typeCode or cls.typeConstant ) zeros = classmethod( logs.logOnFail(", "ArrayDatatype, ctypes.POINTER(_types.GLubyte )): \"\"\"Array datatype for GLubyte types\"\"\" baseType =", "self, plugin_match ): self.match = plugin_match self.output_handler = None self.preferredOutput", "datatype for GLint types\"\"\" baseType = _types.GLint typeConstant = _types.GL_INT", "handler, types=None ): \"\"\"Register this class as handler for given", "GLbooleanArray = GLubyteArray class GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort )): \"\"\"Array datatype", ")): \"\"\"Array datatype for GLclampd types\"\"\" baseType = _types.GLclampd typeConstant", "def __call__( self, value ): \"\"\"Lookup of handler for given", "known data-pointer type, return long for pointer\"\"\" try: return cls.getHandler(value).dataPointer(", "ADT( GL_1_1.GL_INT, _types.GLint ) GLubyteArray = GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE,", "= ADT( GL_1_1.GL_FLOAT, _types.GLfloat ) GLbyteArray = ADT( GL_1_1.GL_BYTE, 
_types.GLbyte", "] = handler if handler.isOutput: self.all_output_handlers.append( handler ) def registerReturn(", "import OpenGL from OpenGL.raw.GL import _types from OpenGL import plugins", ")): \"\"\"Array datatype for GLclampf types\"\"\" baseType = _types.GLclampf typeConstant", "GL_1_1.GL_INT, _types.GLint ) GLubyteArray = GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte", "classmethod( logs.logOnFail( from_param, _log ) ) def dataPointer( cls, value", "GLcharArray = GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar ) GLshortArray =", "formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY class ArrayDatatype( object ): \"\"\"Mix-in for array", "for given set of types\"\"\" if not isinstance( types, (list,tuple)):", ": GLclampdArray, GL_1_1.GL_FLOAT : GLclampfArray, GL_1_1.GL_FLOAT : GLfloatArray, GL_1_1.GL_DOUBLE :", "= False @classmethod def getRegistry( cls ): \"\"\"Get our handler", ") ) def arraySize( cls, value, typeCode = None ):", ") ) def dataPointer( cls, value ): \"\"\"Given a value", "_log.warn( \"\"\"Failure in dataPointer for %s instance %s\"\"\", type(value), value,", "OpenGL type of the corresponding pointer Note: this is not", "registered handlers. \"\"\" typeConstant = None handler = GLOBAL_REGISTRY getHandler", "GLOBAL_REGISTRY.get_output_handler isAccelerated = False @classmethod def getRegistry( cls ): \"\"\"Get", "For most data-types this is arraySize() * atomic-unit-size \"\"\" return", "= classmethod( logs.logOnFail( dimensions, _log ) ) def arrayByteCount( cls,", ") # the final array data-type classes... 
class GLclampdArray( ArrayDatatype,", "baseType = _types.GLshort typeConstant = _types.GL_SHORT class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint", "dimensions, _log ) ) def arrayByteCount( cls, value ): \"\"\"Given", "value in a known data-pointer type, return long for pointer\"\"\"", "\"\"\" return cls.getHandler(value).arrayByteCount( value ) arrayByteCount = classmethod( logs.logOnFail( arrayByteCount,", "output handler object\"\"\" if self.output_handler is None: if self.preferredOutput is", "typeCode=None ): \"\"\"Allocate a return array of the given dimensions", "typeCode or cls.typeConstant ) arraySize = classmethod( logs.logOnFail( arraySize, _log", "= _types.GLuint typeConstant = _types.GL_UNSIGNED_INT class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )):", "def getRegistry( cls ): \"\"\"Get our handler registry\"\"\" return cls.handler", "datatype for GLenum types\"\"\" baseType = _types.GLvoidp typeConstant = _types.GL_VOID_P", "_types.GL_UNSIGNED_BYTE GLbooleanArray = GLubyteArray class GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort )): \"\"\"Array", "): \"\"\"Get our handler registry\"\"\" return cls.handler def from_param( cls,", "is used to mark a particular argument as having an", "] for type in types: self[ type ] = handler", ") ) # the final array data-type classes... 
class GLclampdArray(", "False @classmethod def getRegistry( cls ): \"\"\"Get our handler registry\"\"\"", "OpenGL_accelerate\" ) if ADT is None: # Python-coded version class", "returnHandler = GLOBAL_REGISTRY.get_output_handler isAccelerated = False @classmethod def getRegistry( cls", "if self.output_handler is None: if self.preferredOutput is not None: self.output_handler", ")): \"\"\"Array datatype for GLenum types\"\"\" baseType = _types.GLvoidp typeConstant", "= _types.GLchar typeConstant = _types.GL_BYTE GLcharARBArray = GLcharArray class GLshortArray(", "types\"\"\" import ctypes import OpenGL from OpenGL.raw.GL import _types from", "_log ) ) def arrayToGLType( cls, value ): \"\"\"Given a", "self.output_handler = handler GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY", "type, which means that it is eligible for handling via", "from OpenGL import acceleratesupport ADT = None if acceleratesupport.ACCELERATE_AVAILABLE: try:", "(not even ctypes/numpy ones!)\"\"\" ) return self.output_handler def register( self,", "for base in typ.__mro__: handler = self.get( base ) if", "for GLuint types\"\"\" baseType = _types.GLuint64 typeConstant = _types.GL_UNSIGNED_INT64 class", "[ types ] for type in types: self[ type ]", "typedPointer( cls, value ): \"\"\"Return a pointer-to-base-type pointer for given", "= ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte ) GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort", "_types.GLdouble ) GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat ) GLbyteArray =", "_types.GLsizei ) GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE =", "the OpenGL type of the corresponding pointer Note: this is", "None handler = GLOBAL_REGISTRY getHandler = GLOBAL_REGISTRY.__call__ returnHandler = GLOBAL_REGISTRY.get_output_handler", "GLshort types\"\"\" baseType = _types.GLshort typeConstant = _types.GL_SHORT class GLintArray(", "datatype for 
GLuint types\"\"\" baseType = _types.GLuint typeConstant = _types.GL_UNSIGNED_INT", "= _types.GL_SHORT class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint )): \"\"\"Array datatype for", "value ): \"\"\"Given a data-value, try to determine number of", ") GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 ) GLenumArray = ADT(", "typeConstant = _types.GL_DOUBLE class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )): \"\"\"Array datatype", "not handler: if hasattr( typ, '__mro__' ): for base in", "GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )): \"\"\"Array datatype for GLshort types\"\"\" baseType", "ADT( GL_1_1.GL_FLOAT, _types.GLfloat ) GLbyteArray = ADT( GL_1_1.GL_BYTE, _types.GLbyte )", "\"\"\"Fast-path lookup for output handler object\"\"\" if self.output_handler is None:", "ctypes.POINTER(_types.GLuint64 )): \"\"\"Array datatype for GLuint types\"\"\" baseType = _types.GLuint64", "handler ): \"\"\"Register this handler as the default return-type handler\"\"\"", "return cls.handler def from_param( cls, value, typeConstant=None ): \"\"\"Given a", "value.__class__ except AttributeError as err: typ = type(value) handler =", "logs.logOnFail( arrayToGLType, _log ) ) def arraySize( cls, value, typeCode", "GLfloat types\"\"\" baseType = _types.GLfloat typeConstant = _types.GL_FLOAT class GLdoubleArray(", "pointer for given value\"\"\" return ctypes.cast( cls.dataPointer(value), ctypes.POINTER( cls.baseType ))", "_types.GLbyte ) GLcharArray = GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar )", "None ): \"\"\"Given a data-value, calculate dimensions for the array", "GLsizei types\"\"\" baseType = _types.GLsizei typeConstant = _types.GL_INT class GLvoidpArray(", "accelerated ArrayDatatype' ) ArrayDatatype = ADT( None, None ) GLclampdArray", "name %s found'%(name,)) def get_output_handler( self ): \"\"\"Fast-path lookup for", "ctypes.POINTER(_types.GLbyte )): \"\"\"Array datatype for GLbyte types\"\"\" baseType = 
_types.GLbyte", "(if possible) Uses our local type if defined, otherwise asks", "raise TypeError( \"\"\"No array-type handler for type %s.%s (value: %s)", "GL_1_1.GL_BYTE : GLbyteArray, GL_1_1.GL_SHORT : GLshortArray, GL_1_1.GL_INT : GLintArray, GL_1_1.GL_UNSIGNED_BYTE", "ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte ) GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort )", "_log ) ) def arrayByteCount( cls, value ): \"\"\"Given a", "['numpy','ctypesarrays'] def __init__( self, plugin_match ): self.match = plugin_match self.output_handler", "dimensions( cls, value ): \"\"\"Given a data-value, get the dimensions", "%s instance %s\"\"\", type(value), value, ) raise dataPointer = classmethod(", "handler raise TypeError( \"\"\"No array-type handler for type %s.%s (value:", "if handler: handler = handler() if handler: self[ typ ]", "cls, value ): \"\"\"Return a pointer-to-base-type pointer for given value\"\"\"", "not currently used in PyOpenGL and may be removed eventually.", "datatype for GLfloat types\"\"\" baseType = _types.GLfloat typeConstant = _types.GL_FLOAT", "handler if handler.isOutput: self.all_output_handlers.append( handler ) def registerReturn( self, handler", "return ctypes.c_void_p(pointer) except TypeError as err: return pointer voidDataPointer =", "\"\"\"Array datatype for GLuint types\"\"\" baseType = _types.GLuint64 typeConstant =", ") ) def arrayByteCount( cls, value ): \"\"\"Given a data-value,", "arrayByteCount = classmethod( logs.logOnFail( arrayByteCount, _log ) ) # the", "should be! 
class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )): \"\"\"Array datatype for", "(abstraction points for GL array types\"\"\" import ctypes import OpenGL", "local type if defined, otherwise asks the handler to guess...", "): \"\"\"Lookup of handler for given value\"\"\" try: typ =", "value in a known data-pointer type, convert to a ctypes", "number of bytes it's final form occupies For most data-types", "import logs _log = logs.getLog( 'OpenGL.arrays.arraydatatype' ) from OpenGL import", "of the given dimensions filled with zeros\"\"\" return cls.returnHandler().zeros( dims,", "= GLOBAL_REGISTRY class ArrayDatatype( object ): \"\"\"Mix-in for array datatype", "object ): \"\"\"Mix-in for array datatype classes The ArrayDatatype marker", "= _types.GLint typeConstant = _types.GL_INT class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )):", "of types\"\"\" if not isinstance( types, (list,tuple)): types = [", "GLubyte types\"\"\" baseType = _types.GLubyte typeConstant = _types.GL_UNSIGNED_BYTE GLbooleanArray =", "handler = self.get( base ) if not handler: handler =", "\"\"\"Array datatype for GLclampf types\"\"\" baseType = _types.GLclampf typeConstant =", "self.output_handler = None self.preferredOutput = None self.all_output_handlers = [] def", "= value.__class__ except AttributeError as err: typ = type(value) handler", "baseType = _types.GLint64 typeConstant = None # TODO: find out", "dimensions for the array (number-of-units)\"\"\" return cls.getHandler(value).arraySize( value, typeCode or", "= GLOBAL_REGISTRY.get_output_handler isAccelerated = False @classmethod def getRegistry( cls ):", "set of types\"\"\" if not isinstance( types, (list,tuple)): types =", "\"\"\"Lookup of handler for given value\"\"\" try: typ = value.__class__", "handler() if handler: self[ typ ] = handler if hasattr(", "typ, '__mro__' ): for base in typ.__mro__: handler = self.get(", "= _types.GLbyte typeConstant = _types.GL_BYTE class GLcharArray( ArrayDatatype, 
ctypes.c_char_p): \"\"\"Array", "ctypes.POINTER(_types.GLint64 )): \"\"\"Array datatype for GLuint types\"\"\" baseType = _types.GLint64", "_types.GLubyte typeConstant = _types.GL_UNSIGNED_BYTE GLbooleanArray = GLubyteArray class GLushortArray( ArrayDatatype,", "def asArray( cls, value, typeCode=None ): \"\"\"Given a value, convert", "for ARB extension pointers-to-arrays\"\"\" baseType = _types.GLchar typeConstant = _types.GL_BYTE", "an \"array\" type, which means that it is eligible for", "zeros = classmethod( logs.logOnFail( zeros, _log ) ) def dimensions(", "value\"\"\" return ctypes.cast( cls.dataPointer(value), ctypes.POINTER( cls.baseType )) typedPointer = classmethod(", "GLbyte types\"\"\" baseType = _types.GLbyte typeConstant = _types.GL_BYTE class GLcharArray(", "self.match( base ) if handler: handler = handler.load() if handler:", "as having an \"array\" type, which means that it is", "\"\"\"Array datatype for GLushort types\"\"\" baseType = _types.GLushort typeConstant =", "= None ): \"\"\"Given a data-value, calculate dimensions for the", "GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat ) GLbyteArray = ADT( GL_1_1.GL_BYTE,", "_types.GLenum ) GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei ) GLvoidpArray =", "self.get( base ) if not handler: handler = self.match( base", "via the arrays sub-package and its registered handlers. 
\"\"\" typeConstant", "GLenum types\"\"\" baseType = _types.GLenum typeConstant = _types.GL_UNSIGNED_INT class GLsizeiArray(", "ctypes.POINTER(_types.GLvoid )): \"\"\"Array datatype for GLenum types\"\"\" baseType = _types.GLvoidp", "GLOBAL_REGISTRY class ArrayDatatype( object ): \"\"\"Mix-in for array datatype classes", "handler if hasattr( handler, 'registerEquivalent' ): handler.registerEquivalent( typ, base )", ") zeros = classmethod( logs.logOnFail( zeros, _log ) ) def", "default return-type handler\"\"\" if isinstance( handler, (str,unicode)): self.preferredOutput = handler", "for preferred in self.GENERIC_OUTPUT_PREFERENCES: self.output_handler = self.handler_by_plugin_name( preferred ) if", "value ): \"\"\"Given a data-value, guess the OpenGL type of", "): \"\"\"Fast-path lookup for output handler object\"\"\" if self.output_handler is", "= [ types ] for type in types: self[ type", "this should be! class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )): \"\"\"Array datatype", "\"\"\"Determine unit size of an array (if possible) Uses our", "else: # Cython-coded array handler _log.info( 'Using accelerated ArrayDatatype' )", ": GLintArray, GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT : GLushortArray, GL_1_1.GL_UNSIGNED_INT :", "filled with zeros\"\"\" return cls.returnHandler().zeros( dims, typeCode or cls.typeConstant )", "types\"\"\" baseType = _types.GLubyte typeConstant = _types.GL_UNSIGNED_BYTE GLbooleanArray = GLubyteArray", "handler self.output_handler = None else: self.preferredOutput = None self.output_handler =", "for given value\"\"\" return ctypes.cast( cls.dataPointer(value), ctypes.POINTER( cls.baseType )) typedPointer", "handler: if hasattr( typ, '__mro__' ): for base in typ.__mro__:", "in a known data-pointer type, return void_p for pointer\"\"\" pointer", "self.match = plugin_match self.output_handler = None self.preferredOutput = None self.all_output_handlers", "ctypes import OpenGL from OpenGL.raw.GL import 
_types from OpenGL import", "_types.GL_INT class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )): \"\"\"Array datatype for GLenum", "is None: if self.preferredOutput is not None: self.output_handler = self.handler_by_plugin_name(", "ImportError as err: return None else: raise RuntimeError( 'No handler", "self.get( typ ) if not handler: if hasattr( typ, '__mro__'", "= GLOBAL_REGISTRY getHandler = GLOBAL_REGISTRY.__call__ returnHandler = GLOBAL_REGISTRY.get_output_handler isAccelerated =", "arraySize() * atomic-unit-size \"\"\" return cls.getHandler(value).arrayByteCount( value ) arrayByteCount =", "= { GL_1_1.GL_DOUBLE : GLclampdArray, GL_1_1.GL_FLOAT : GLclampfArray, GL_1_1.GL_FLOAT :", "ArrayDatatype accelerator from OpenGL_accelerate\" ) if ADT is None: #", "OpenGL import plugins from OpenGL.arrays import formathandler, _arrayconstants as GL_1_1", "is None: # Python-coded version class HandlerRegistry( dict ): GENERIC_OUTPUT_PREFERENCES", ") def arrayByteCount( cls, value ): \"\"\"Given a data-value, try", "GL_1_1 from OpenGL import logs _log = logs.getLog( 'OpenGL.arrays.arraydatatype' )", "in self.GENERIC_OUTPUT_PREFERENCES: self.output_handler = self.handler_by_plugin_name( preferred ) if self.output_handler: break", "self.handler_by_plugin_name( self.preferredOutput ) if not self.output_handler: for preferred in self.GENERIC_OUTPUT_PREFERENCES:", ") GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat ) GLbyteArray = ADT(", "for pointer\"\"\" try: return cls.getHandler(value).dataPointer( value ) except Exception as", "with zeros\"\"\" return cls.returnHandler().zeros( dims, typeCode or cls.typeConstant ) zeros", "if hasattr( handler, 'registerEquivalent' ): handler.registerEquivalent( typ, base ) return", "size of an array (if possible) Uses our local type", ") GLubyteArray = GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte ) GLushortArray", ") if plugin: try: return plugin.load() except ImportError as err:", "GLintArray( 
ArrayDatatype, ctypes.POINTER(_types.GLint )): \"\"\"Array datatype for GLint types\"\"\" baseType", "GLdouble types\"\"\" baseType = _types.GLdouble typeConstant = _types.GL_DOUBLE class GLbyteArray(", "for GLuint types\"\"\" baseType = _types.GLint64 typeConstant = None #", "the handler to guess... \"\"\" return cls.getHandler(value).unitSize( value, typeCode or", "value, convert to preferred array representation\"\"\" return cls.getHandler(value).asArray( value, typeCode", "as ADT except ImportError as err: _log.warn( \"Unable to load", "GL_1_1.GL_DOUBLE : GLdoubleArray, GL_1_1.GL_BYTE : GLbyteArray, GL_1_1.GL_SHORT : GLshortArray, GL_1_1.GL_INT", "typeConstant = _types.GL_UNSIGNED_INT class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )): \"\"\"Array datatype", "datatype for GLclampf types\"\"\" baseType = _types.GLclampf typeConstant = _types.GL_FLOAT", ") GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd ) GLclampfArray = ADT(", ": GLfloatArray, GL_1_1.GL_DOUBLE : GLdoubleArray, GL_1_1.GL_BYTE : GLbyteArray, GL_1_1.GL_SHORT :", "if isinstance( handler, (str,unicode)): self.preferredOutput = handler self.output_handler = None", "not None: self.output_handler = self.handler_by_plugin_name( self.preferredOutput ) if not self.output_handler:", "value ) try: return ctypes.c_void_p(pointer) except TypeError as err: return", "types\"\"\" baseType = _types.GLushort typeConstant = _types.GL_UNSIGNED_SHORT class GLuintArray( ArrayDatatype,", "return handler def handler_by_plugin_name( self, name ): plugin = plugins.FormatHandler.by_name(", "_log = logs.getLog( 'OpenGL.arrays.arraydatatype' ) from OpenGL import acceleratesupport ADT", "ADT except ImportError as err: _log.warn( \"Unable to load ArrayDatatype", "[] def __call__( self, value ): \"\"\"Lookup of handler for", "GL_1_1.GL_UNSIGNED_INT, _types.GLuint ) GLint64Array = ADT( None, _types.GLint64 ) GLuint64Array", "is not currently used in PyOpenGL and may be removed", "self.output_handler: raise 
RuntimeError( \"\"\"Unable to find any output handler at", "voidDataPointer( cls, value ): \"\"\"Given value in a known data-pointer", "arrayByteCount, _log ) ) # the final array data-type classes...", "version class HandlerRegistry( dict ): GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays'] def __init__(", "handler def handler_by_plugin_name( self, name ): plugin = plugins.FormatHandler.by_name( name", "self.handler_by_plugin_name( preferred ) if self.output_handler: break if not self.output_handler: raise", "typeConstant = _types.GL_INT class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )): \"\"\"Array datatype", "GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte ) GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT,", "sub-package and its registered handlers. \"\"\" typeConstant = None handler", "from_param( cls, value, typeConstant=None ): \"\"\"Given a value in a", "repr(value)[:50] ) ) return handler def handler_by_plugin_name( self, name ):", "guess the OpenGL type of the corresponding pointer Note: this", "handling via the arrays sub-package and its registered handlers. 
\"\"\"", "handler for type %s.%s (value: %s) registered\"\"\"%( typ.__module__, type.__name__, repr(value)[:50]", "dims, typeCode or cls.typeConstant ) zeros = classmethod( logs.logOnFail( zeros,", "typeConstant = _types.GL_SHORT class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint )): \"\"\"Array datatype", "return cls.getHandler(value).unitSize( value, typeCode or cls.typeConstant ) unitSize = classmethod(", "\"\"\" return cls.getHandler(value).unitSize( value, typeCode or cls.typeConstant ) unitSize =", "cls, value ): \"\"\"Given a value in a known data-pointer", "value, typeCode = None ): \"\"\"Given a data-value, calculate dimensions", "array (if possible) Uses our local type if defined, otherwise", "= ADT( GL_1_1.GL_INT, _types.GLsizei ) GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp", "GLdoubleArray, GL_1_1.GL_BYTE : GLbyteArray, GL_1_1.GL_SHORT : GLshortArray, GL_1_1.GL_INT : GLintArray,", "baseType = _types.GLubyte typeConstant = _types.GL_UNSIGNED_BYTE GLbooleanArray = GLubyteArray class", "GL_CONSTANT_TO_ARRAY_TYPE = { GL_1_1.GL_DOUBLE : GLclampdArray, GL_1_1.GL_FLOAT : GLclampfArray, GL_1_1.GL_FLOAT", "value in a known data-pointer type, return void_p for pointer\"\"\"", "= handler if hasattr( handler, 'registerEquivalent' ): handler.registerEquivalent( typ, base", "ctypes.POINTER(_types.GLushort )): \"\"\"Array datatype for GLushort types\"\"\" baseType = _types.GLushort", "plugin = plugins.FormatHandler.by_name( name ) if plugin: try: return plugin.load()", "\"\"\" return cls.getHandler(value).arrayToGLType( value ) arrayToGLType = classmethod( logs.logOnFail( arrayToGLType,", "cls, value, typeConstant=None ): \"\"\"Given a value in a known", "_log ) ) def dataPointer( cls, value ): \"\"\"Given a", "__init__( self, plugin_match ): self.match = plugin_match self.output_handler = None", "cls ): \"\"\"Get our handler registry\"\"\" return cls.handler def from_param(", "registerReturn( self, handler ): \"\"\"Register this handler as the 
default", "a value in a known data-pointer type, return long for", "ArrayDatatype as ADT except ImportError as err: _log.warn( \"Unable to", "= handler self.output_handler = None else: self.preferredOutput = None self.output_handler", ") if not handler: handler = self.match( base ) if", "base in typ.__mro__: handler = self.get( base ) if not", "import formathandler, _arrayconstants as GL_1_1 from OpenGL import logs _log", "at all (not even ctypes/numpy ones!)\"\"\" ) return self.output_handler def", "ctypes.POINTER(_types.GLdouble )): \"\"\"Array datatype for GLdouble types\"\"\" baseType = _types.GLdouble", "_types.GLvoidp typeConstant = _types.GL_VOID_P else: # Cython-coded array handler _log.info(", "handler = self.get( typ ) if not handler: if hasattr(", "for GLshort types\"\"\" baseType = _types.GLshort typeConstant = _types.GL_SHORT class", "): \"\"\"Mix-in for array datatype classes The ArrayDatatype marker essentially", "datatype for GLenum types\"\"\" baseType = _types.GLenum typeConstant = _types.GL_UNSIGNED_INT", ") arrayToGLType = classmethod( logs.logOnFail( arrayToGLType, _log ) ) def", "GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )): \"\"\"Array datatype for GLclampd types\"\"\" baseType", "of bytes it's final form occupies For most data-types this", "\"\"\"Array datatype for GLuint types\"\"\" baseType = _types.GLint64 typeConstant =", "cls, value ): \"\"\"Given a data-value, guess the OpenGL type", "typeConstant = _types.GL_BYTE GLcharARBArray = GLcharArray class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort", ") ) def zeros( cls, dims, typeCode=None ): \"\"\"Allocate a", "in typ.__mro__: handler = self.get( base ) if not handler:", "Exception as err: _log.warn( \"\"\"Failure in dataPointer for %s instance", "= self.handler_by_plugin_name( self.preferredOutput ) if not self.output_handler: for preferred in", "\"\"\"Given a data-value, try to determine number of bytes it's", "baseType = _types.GLclampf typeConstant = 
_types.GL_FLOAT class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat", "try: typ = value.__class__ except AttributeError as err: typ =", "pointer = cls.dataPointer( value ) try: return ctypes.c_void_p(pointer) except TypeError", "cls.getHandler(value).arrayToGLType( value ) arrayToGLType = classmethod( logs.logOnFail( arrayToGLType, _log )", "typedPointer = classmethod( typedPointer ) def asArray( cls, value, typeCode=None", "return cls.getHandler(value).asArray( value, typeCode or cls.typeConstant ) asArray = classmethod(", "self[ typ ] = handler if hasattr( handler, 'registerEquivalent' ):", "handler: handler = handler() if handler: self[ typ ] =", "= classmethod( typedPointer ) def asArray( cls, value, typeCode=None ):", "# Cython-coded array handler _log.info( 'Using accelerated ArrayDatatype' ) ArrayDatatype", "to load ArrayDatatype accelerator from OpenGL_accelerate\" ) if ADT is", ") ) return handler def handler_by_plugin_name( self, name ): plugin", "= ADT( GL_1_1.GL_SHORT, _types.GLshort ) GLintArray = ADT( GL_1_1.GL_INT, _types.GLint", "handler.isOutput: self.all_output_handlers.append( handler ) def registerReturn( self, handler ): \"\"\"Register", "GLclampf types\"\"\" baseType = _types.GLclampf typeConstant = _types.GL_FLOAT class GLfloatArray(", "\"\"\"Array datatype for GLshort types\"\"\" baseType = _types.GLshort typeConstant =", "def voidDataPointer( cls, value ): \"\"\"Given value in a known", "types\"\"\" baseType = _types.GLuint typeConstant = _types.GL_UNSIGNED_INT class GLint64Array( ArrayDatatype,", "= handler.load() if handler: handler = handler() if handler: self[", "ArrayDatatype, ctypes.POINTER(_types.GLuint )): \"\"\"Array datatype for GLuint types\"\"\" baseType =", ") GLbyteArray = ADT( GL_1_1.GL_BYTE, _types.GLbyte ) GLcharArray = GLcharARBArray", "self.output_handler: break if not self.output_handler: raise RuntimeError( \"\"\"Unable to find", "cls.dataPointer( value ) try: return ctypes.c_void_p(pointer) except 
TypeError as err:", "\"\"\"No array-type handler for type %s.%s (value: %s) registered\"\"\"%( typ.__module__,", "= _types.GLubyte typeConstant = _types.GL_UNSIGNED_BYTE GLbooleanArray = GLubyteArray class GLushortArray(", "typ, base ) return handler raise TypeError( \"\"\"No array-type handler", "value, typeCode or cls.typeConstant ) asArray = classmethod( logs.logOnFail( asArray,", ")): \"\"\"Array datatype for GLubyte types\"\"\" baseType = _types.GLubyte typeConstant", "ctypes.POINTER(_types.GLfloat )): \"\"\"Array datatype for GLfloat types\"\"\" baseType = _types.GLfloat", "_types.GLuint64 typeConstant = _types.GL_UNSIGNED_INT64 class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )): \"\"\"Array", "if acceleratesupport.ACCELERATE_AVAILABLE: try: from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT except", "find out what this should be! class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64", "_types.GL_DOUBLE class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )): \"\"\"Array datatype for GLclampf", "return void_p for pointer\"\"\" pointer = cls.dataPointer( value ) try:", "isAccelerated = False @classmethod def getRegistry( cls ): \"\"\"Get our", "isinstance( types, (list,tuple)): types = [ types ] for type", "OpenGL from OpenGL.raw.GL import _types from OpenGL import plugins from", "GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar ) GLshortArray = ADT( GL_1_1.GL_SHORT,", "pointer voidDataPointer = classmethod( logs.logOnFail( voidDataPointer, _log ) ) def", ") if not handler: if hasattr( typ, '__mro__' ): for", "a known data-pointer type, return long for pointer\"\"\" try: return", "value, typeCode or cls.typeConstant ) arraySize = classmethod( logs.logOnFail( arraySize,", ")): \"\"\"Array datatype for GLsizei types\"\"\" baseType = _types.GLsizei typeConstant", "= _types.GL_UNSIGNED_INT64 class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )): \"\"\"Array datatype for", "given set of 
types\"\"\" if not isinstance( types, (list,tuple)): types", "): \"\"\"Given a data-value, calculate dimensions for the array (number-of-units)\"\"\"", "cls.getHandler(value).from_param( value, cls.typeConstant ) from_param = classmethod( logs.logOnFail( from_param, _log", "= cls.dataPointer( value ) try: return ctypes.c_void_p(pointer) except TypeError as", "argument as having an \"array\" type, which means that it", "array data-type classes... class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )): \"\"\"Array datatype", "and its registered handlers. \"\"\" typeConstant = None handler =", "\"\"\" typeConstant = None handler = GLOBAL_REGISTRY getHandler = GLOBAL_REGISTRY.__call__", "ArrayDatatype marker essentially is used to mark a particular argument", "typeConstant = _types.GL_UNSIGNED_INT64 class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )): \"\"\"Array datatype", "return pointer voidDataPointer = classmethod( logs.logOnFail( voidDataPointer, _log ) )", "= _types.GL_INT class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )): \"\"\"Array datatype for", "types\"\"\" baseType = _types.GLint typeConstant = _types.GL_INT class GLubyteArray( ArrayDatatype,", "err: _log.warn( \"\"\"Failure in dataPointer for %s instance %s\"\"\", type(value),", "of the corresponding pointer Note: this is not currently used", "value ): \"\"\"Given a data-value, get the dimensions (assumes full", "be removed eventually. 
\"\"\" return cls.getHandler(value).arrayToGLType( value ) arrayToGLType =", "typeConstant = _types.GL_FLOAT class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )): \"\"\"Array datatype", "a pointer-to-base-type pointer for given value\"\"\" return ctypes.cast( cls.dataPointer(value), ctypes.POINTER(", "GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum ) GLsizeiArray = ADT( GL_1_1.GL_INT,", "(assumes full structure info)\"\"\" return cls.getHandler(value).dimensions( value ) dimensions =", "if defined, otherwise asks the handler to guess... \"\"\" return", "handler: self[ typ ] = handler if hasattr( handler, 'registerEquivalent'", "ctypes.c_char_p): \"\"\"Array datatype for ARB extension pointers-to-arrays\"\"\" baseType = _types.GLchar", "name ): plugin = plugins.FormatHandler.by_name( name ) if plugin: try:", "def dataPointer( cls, value ): \"\"\"Given a value in a", ") ) def typedPointer( cls, value ): \"\"\"Return a pointer-to-base-type", "in a known data-pointer type, convert to a ctypes pointer\"\"\"", ") GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf ) GLdoubleArray = ADT(", "ArrayDatatype, ctypes.POINTER(_types.GLsizei )): \"\"\"Array datatype for GLsizei types\"\"\" baseType =", "\"\"\"Array datatype for ARB extension pointers-to-arrays\"\"\" baseType = _types.GLchar typeConstant", "GLbyteArray, GL_1_1.GL_SHORT : GLshortArray, GL_1_1.GL_INT : GLintArray, GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray,", "\"\"\"Get our handler registry\"\"\" return cls.handler def from_param( cls, value,", "cls.baseType )) typedPointer = classmethod( typedPointer ) def asArray( cls,", "formathandler, _arrayconstants as GL_1_1 from OpenGL import logs _log =", "cls.handler def from_param( cls, value, typeConstant=None ): \"\"\"Given a value", "raise RuntimeError( \"\"\"Unable to find any output handler at all", ") def dimensions( cls, value ): \"\"\"Given a data-value, get", "ctypes.POINTER(_types.GLsizei )): \"\"\"Array datatype for GLsizei types\"\"\" 
baseType = _types.GLsizei", "): \"\"\"Allocate a return array of the given dimensions filled", "= classmethod( logs.logOnFail( unitSize, _log ) ) def zeros( cls,", "self.all_output_handlers.append( handler ) def registerReturn( self, handler ): \"\"\"Register this", "def handler_by_plugin_name( self, name ): plugin = plugins.FormatHandler.by_name( name )", "return long for pointer\"\"\" try: return cls.getHandler(value).dataPointer( value ) except", "is arraySize() * atomic-unit-size \"\"\" return cls.getHandler(value).arrayByteCount( value ) arrayByteCount", "GL_1_1.GL_FLOAT, _types.GLfloat ) GLbyteArray = ADT( GL_1_1.GL_BYTE, _types.GLbyte ) GLcharArray", "class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )): \"\"\"Array datatype for GLclampd types\"\"\"", "calculate dimensions for the array (number-of-units)\"\"\" return cls.getHandler(value).arraySize( value, typeCode", "types\"\"\" baseType = _types.GLclampd typeConstant = _types.GL_DOUBLE class GLclampfArray( ArrayDatatype,", "TODO: find out what this should be! class GLuint64Array( ArrayDatatype,", "typeConstant = _types.GL_UNSIGNED_BYTE GLbooleanArray = GLubyteArray class GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort", "as err: _log.warn( \"Unable to load ArrayDatatype accelerator from OpenGL_accelerate\"", "our handler registry\"\"\" return cls.handler def from_param( cls, value, typeConstant=None", "def dimensions( cls, value ): \"\"\"Given a data-value, get the", ") if ADT is None: # Python-coded version class HandlerRegistry(", "return None else: raise RuntimeError( 'No handler of name %s", "find any output handler at all (not even ctypes/numpy ones!)\"\"\"", "type if defined, otherwise asks the handler to guess... 
\"\"\"", "= None self.all_output_handlers = [] def __call__( self, value ):", "is not None: self.output_handler = self.handler_by_plugin_name( self.preferredOutput ) if not", "): \"\"\"Given a data-value, guess the OpenGL type of the", "): \"\"\"Given value in a known data-pointer type, return void_p", "preferred ) if self.output_handler: break if not self.output_handler: raise RuntimeError(", ") GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort ) GLuintArray = ADT(", "RuntimeError( \"\"\"Unable to find any output handler at all (not", "having an \"array\" type, which means that it is eligible", ") def voidDataPointer( cls, value ): \"\"\"Given value in a", "base ) if handler: handler = handler.load() if handler: handler", "None, None ) GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd ) GLclampfArray", "logs.getLog( 'OpenGL.arrays.arraydatatype' ) from OpenGL import acceleratesupport ADT = None", "ADT( GL_1_1.GL_DOUBLE, _types.GLdouble ) GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat )", "except Exception as err: _log.warn( \"\"\"Failure in dataPointer for %s", "for array datatype classes The ArrayDatatype marker essentially is used", ") raise dataPointer = classmethod( logs.logOnFail( dataPointer, _log ) )", "Cython-coded array handler _log.info( 'Using accelerated ArrayDatatype' ) ArrayDatatype =", "\"\"\"Register this handler as the default return-type handler\"\"\" if isinstance(", "def from_param( cls, value, typeConstant=None ): \"\"\"Given a value in", "final form occupies For most data-types this is arraySize() *", "corresponding pointer Note: this is not currently used in PyOpenGL", "from OpenGL.arrays import formathandler, _arrayconstants as GL_1_1 from OpenGL import", ")): \"\"\"Array datatype for GLushort types\"\"\" baseType = _types.GLushort typeConstant", "cls.typeConstant ) asArray = classmethod( logs.logOnFail( asArray, _log ) )", "self.output_handler = None else: self.preferredOutput = None self.output_handler = handler", 
"GLint64Array = ADT( None, _types.GLint64 ) GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64,", "types ] for type in types: self[ type ] =", "removed eventually. \"\"\" return cls.getHandler(value).arrayToGLType( value ) arrayToGLType = classmethod(", "array handler _log.info( 'Using accelerated ArrayDatatype' ) ArrayDatatype = ADT(", "GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort ) GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint ) GLint64Array", "cls.typeConstant ) unitSize = classmethod( logs.logOnFail( unitSize, _log ) )", "self.preferredOutput ) if not self.output_handler: for preferred in self.GENERIC_OUTPUT_PREFERENCES: self.output_handler", "_log ) ) def arraySize( cls, value, typeCode = None", "for GLubyte types\"\"\" baseType = _types.GLubyte typeConstant = _types.GL_UNSIGNED_BYTE GLbooleanArray", "array (number-of-units)\"\"\" return cls.getHandler(value).arraySize( value, typeCode or cls.typeConstant ) arraySize", "= plugins.FormatHandler.by_name( name ) if plugin: try: return plugin.load() except", "handler for given set of types\"\"\" if not isinstance( types,", "types, (list,tuple)): types = [ types ] for type in", "\"Unable to load ArrayDatatype accelerator from OpenGL_accelerate\" ) if ADT", "the default return-type handler\"\"\" if isinstance( handler, (str,unicode)): self.preferredOutput =", "not self.output_handler: raise RuntimeError( \"\"\"Unable to find any output handler", "marker essentially is used to mark a particular argument as", "convert to preferred array representation\"\"\" return cls.getHandler(value).asArray( value, typeCode or", "\"\"\"Given a data-value, calculate dimensions for the array (number-of-units)\"\"\" return", "import ctypes import OpenGL from OpenGL.raw.GL import _types from OpenGL", "used in PyOpenGL and may be removed eventually. 
\"\"\" return", "to a ctypes pointer\"\"\" return cls.getHandler(value).from_param( value, cls.typeConstant ) from_param", "GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 ) GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT,", "= _types.GLvoidp typeConstant = _types.GL_VOID_P else: # Cython-coded array handler", "ADT( GL_1_1.GL_DOUBLE, _types.GLclampd ) GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf )", "= _types.GL_BYTE GLcharARBArray = GLcharArray class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )):", "typ = value.__class__ except AttributeError as err: typ = type(value)", "value, typeCode=None ): \"\"\"Determine unit size of an array (if", "arrayByteCount( cls, value ): \"\"\"Given a data-value, try to determine", "given value\"\"\" return ctypes.cast( cls.dataPointer(value), ctypes.POINTER( cls.baseType )) typedPointer =", "TypeError as err: return pointer voidDataPointer = classmethod( logs.logOnFail( voidDataPointer,", "ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint ) GLint64Array = ADT( None, _types.GLint64 )", "else: self.preferredOutput = None self.output_handler = handler GLOBAL_REGISTRY = HandlerRegistry(", "for type in types: self[ type ] = handler if", "acceleratesupport ADT = None if acceleratesupport.ACCELERATE_AVAILABLE: try: from OpenGL_accelerate.arraydatatype import", "defined, otherwise asks the handler to guess... 
\"\"\" return cls.getHandler(value).unitSize(", "classmethod( logs.logOnFail( arraySize, _log ) ) def unitSize( cls, value,", "class GLdoubleArray( ArrayDatatype, ctypes.POINTER(_types.GLdouble )): \"\"\"Array datatype for GLdouble types\"\"\"", "GL_1_1.GL_BYTE, _types.GLchar ) GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort ) GLintArray", "dims, typeCode=None ): \"\"\"Allocate a return array of the given", "typeCode=None ): \"\"\"Determine unit size of an array (if possible)", ")) typedPointer = classmethod( typedPointer ) def asArray( cls, value,", "base ) if not handler: handler = self.match( base )", "cls.typeConstant ) zeros = classmethod( logs.logOnFail( zeros, _log ) )", "if self.preferredOutput is not None: self.output_handler = self.handler_by_plugin_name( self.preferredOutput )", "data-type implementations (abstraction points for GL array types\"\"\" import ctypes", "): GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays'] def __init__( self, plugin_match ): self.match", "zeros( cls, dims, typeCode=None ): \"\"\"Allocate a return array of", "the corresponding pointer Note: this is not currently used in", "datatype for GLuint types\"\"\" baseType = _types.GLint64 typeConstant = None", "Uses our local type if defined, otherwise asks the handler", "def arrayByteCount( cls, value ): \"\"\"Given a data-value, try to", "= _types.GLfloat typeConstant = _types.GL_FLOAT class GLdoubleArray( ArrayDatatype, ctypes.POINTER(_types.GLdouble )):", "self, handler ): \"\"\"Register this handler as the default return-type", "data-pointer type, convert to a ctypes pointer\"\"\" return cls.getHandler(value).from_param( value,", "'Using accelerated ArrayDatatype' ) ArrayDatatype = ADT( None, None )", "GLintArray, GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT : GLushortArray, GL_1_1.GL_UNSIGNED_INT : GLuintArray,", "self.output_handler is None: if self.preferredOutput is not None: self.output_handler =", "not self.output_handler: for preferred in 
self.GENERIC_OUTPUT_PREFERENCES: self.output_handler = self.handler_by_plugin_name( preferred", "class GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort )): \"\"\"Array datatype for GLushort types\"\"\"", "cls, value ): \"\"\"Given value in a known data-pointer type,", "_log ) ) def zeros( cls, dims, typeCode=None ): \"\"\"Allocate", "handler _log.info( 'Using accelerated ArrayDatatype' ) ArrayDatatype = ADT( None,", "classmethod( logs.logOnFail( zeros, _log ) ) def dimensions( cls, value", "None else: raise RuntimeError( 'No handler of name %s found'%(name,))", ") ) def arrayToGLType( cls, value ): \"\"\"Given a data-value,", "): \"\"\"Determine unit size of an array (if possible) Uses", "GLushort types\"\"\" baseType = _types.GLushort typeConstant = _types.GL_UNSIGNED_SHORT class GLuintArray(", "in a known data-pointer type, return long for pointer\"\"\" try:", "%s) registered\"\"\"%( typ.__module__, type.__name__, repr(value)[:50] ) ) return handler def", "asArray, _log ) ) def arrayToGLType( cls, value ): \"\"\"Given", "as err: return pointer voidDataPointer = classmethod( logs.logOnFail( voidDataPointer, _log", "handler to guess... \"\"\" return cls.getHandler(value).unitSize( value, typeCode or cls.typeConstant", "handler registry\"\"\" return cls.handler def from_param( cls, value, typeConstant=None ):", "array of the given dimensions filled with zeros\"\"\" return cls.returnHandler().zeros(", "] = handler if hasattr( handler, 'registerEquivalent' ): handler.registerEquivalent( typ,", "may be removed eventually. 
\"\"\" return cls.getHandler(value).arrayToGLType( value ) arrayToGLType", "cls.getHandler(value).arrayByteCount( value ) arrayByteCount = classmethod( logs.logOnFail( arrayByteCount, _log )", "arrayToGLType = classmethod( logs.logOnFail( arrayToGLType, _log ) ) def arraySize(", "classmethod( logs.logOnFail( asArray, _log ) ) def arrayToGLType( cls, value", "a value, convert to preferred array representation\"\"\" return cls.getHandler(value).asArray( value,", "bytes it's final form occupies For most data-types this is", "to preferred array representation\"\"\" return cls.getHandler(value).asArray( value, typeCode or cls.typeConstant", "GL_1_1.GL_UNSIGNED_INT, _types.GLenum ) GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei ) GLvoidpArray", "_log ) ) def typedPointer( cls, value ): \"\"\"Return a", ") from OpenGL import acceleratesupport ADT = None if acceleratesupport.ACCELERATE_AVAILABLE:", "= GLcharArray class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )): \"\"\"Array datatype for", "_types.GLint64 ) GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 ) GLenumArray =", "handler.load() if handler: handler = handler() if handler: self[ typ", "determine number of bytes it's final form occupies For most", "ADT( GL_1_1.GL_BYTE, _types.GLbyte ) GLcharArray = GLcharARBArray = ADT( GL_1_1.GL_BYTE,", "if not handler: if hasattr( typ, '__mro__' ): for base", "return cls.getHandler(value).dataPointer( value ) except Exception as err: _log.warn( \"\"\"Failure", ") GL_CONSTANT_TO_ARRAY_TYPE = { GL_1_1.GL_DOUBLE : GLclampdArray, GL_1_1.GL_FLOAT : GLclampfArray,", "GLclampd types\"\"\" baseType = _types.GLclampd typeConstant = _types.GL_DOUBLE class GLclampfArray(", "classmethod( logs.logOnFail( dataPointer, _log ) ) def voidDataPointer( cls, value", "the arrays sub-package and its registered handlers. 
\"\"\" typeConstant =", "= handler GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY class", "value, ) raise dataPointer = classmethod( logs.logOnFail( dataPointer, _log )", "Note: this is not currently used in PyOpenGL and may", "dimensions filled with zeros\"\"\" return cls.returnHandler().zeros( dims, typeCode or cls.typeConstant", "for GLsizei types\"\"\" baseType = _types.GLsizei typeConstant = _types.GL_INT class", "unitSize, _log ) ) def zeros( cls, dims, typeCode=None ):", ": GLshortArray, GL_1_1.GL_INT : GLintArray, GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT :", "value\"\"\" try: typ = value.__class__ except AttributeError as err: typ", ") GLcharArray = GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar ) GLshortArray", "GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort ) GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT,", "\"\"\"Register this class as handler for given set of types\"\"\"", "_types.GL_FLOAT class GLdoubleArray( ArrayDatatype, ctypes.POINTER(_types.GLdouble )): \"\"\"Array datatype for GLdouble", "as err: typ = type(value) handler = self.get( typ )", "ArrayDatatype, ctypes.POINTER(_types.GLbyte )): \"\"\"Array datatype for GLbyte types\"\"\" baseType =", "_types.GL_VOID_P, _types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE = { GL_1_1.GL_DOUBLE : GLclampdArray, GL_1_1.GL_FLOAT", "None: self.output_handler = self.handler_by_plugin_name( self.preferredOutput ) if not self.output_handler: for", "instance %s\"\"\", type(value), value, ) raise dataPointer = classmethod( logs.logOnFail(", "self.preferredOutput = None self.all_output_handlers = [] def __call__( self, value", "= ['numpy','ctypesarrays'] def __init__( self, plugin_match ): self.match = plugin_match", "try: return cls.getHandler(value).dataPointer( value ) except Exception as err: _log.warn(", "ADT( GL_1_1.GL_FLOAT, _types.GLclampf ) GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble )", 
"_types.GLint ) GLubyteArray = GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte )", "\"\"\"Mix-in for array datatype classes The ArrayDatatype marker essentially is", "GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )): \"\"\"Array datatype for GLenum types\"\"\" baseType", "types = [ types ] for type in types: self[", "asks the handler to guess... \"\"\" return cls.getHandler(value).unitSize( value, typeCode", "self.preferredOutput = None self.output_handler = handler GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match)", "typeConstant = None # TODO: find out what this should", ") except Exception as err: _log.warn( \"\"\"Failure in dataPointer for", "if ADT is None: # Python-coded version class HandlerRegistry( dict", "and may be removed eventually. \"\"\" return cls.getHandler(value).arrayToGLType( value )", "_types.GLclampd ) GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf ) GLdoubleArray =", "GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )): \"\"\"Array datatype for GLubyte types\"\"\" baseType", "plugins from OpenGL.arrays import formathandler, _arrayconstants as GL_1_1 from OpenGL", "typeCode=None ): \"\"\"Given a value, convert to preferred array representation\"\"\"", "typeConstant = _types.GL_DOUBLE class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )): \"\"\"Array datatype", "out what this should be! 
class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )):", ": GLclampfArray, GL_1_1.GL_FLOAT : GLfloatArray, GL_1_1.GL_DOUBLE : GLdoubleArray, GL_1_1.GL_BYTE :", "arraySize, _log ) ) def unitSize( cls, value, typeCode=None ):", "= classmethod( logs.logOnFail( arrayByteCount, _log ) ) # the final", "from OpenGL_accelerate\" ) if ADT is None: # Python-coded version", ") GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint ) GLint64Array = ADT(", "import plugins from OpenGL.arrays import formathandler, _arrayconstants as GL_1_1 from", "= ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint ) GLint64Array = ADT( None, _types.GLint64", "None self.all_output_handlers = [] def __call__( self, value ): \"\"\"Lookup", "preferred array representation\"\"\" return cls.getHandler(value).asArray( value, typeCode or cls.typeConstant )", "arraySize( cls, value, typeCode = None ): \"\"\"Given a data-value,", "= _types.GLushort typeConstant = _types.GL_UNSIGNED_SHORT class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )):", "means that it is eligible for handling via the arrays", "= _types.GLsizei typeConstant = _types.GL_INT class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )):", "possible) Uses our local type if defined, otherwise asks the", "_types.GL_UNSIGNED_INT64 class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )): \"\"\"Array datatype for GLenum", "ctypes.POINTER(_types.GLclampf )): \"\"\"Array datatype for GLclampf types\"\"\" baseType = _types.GLclampf", "output handler at all (not even ctypes/numpy ones!)\"\"\" ) return", "dimensions = classmethod( logs.logOnFail( dimensions, _log ) ) def arrayByteCount(", "= classmethod( logs.logOnFail( asArray, _log ) ) def arrayToGLType( cls,", "value ): \"\"\"Given a value in a known data-pointer type,", "handler GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY class ArrayDatatype(", ") def zeros( cls, dims, 
typeCode=None ): \"\"\"Allocate a return", "GL_1_1.GL_INT : GLintArray, GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT : GLushortArray, GL_1_1.GL_UNSIGNED_INT", "handler, 'registerEquivalent' ): handler.registerEquivalent( typ, base ) return handler raise", ": GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT : GLushortArray, GL_1_1.GL_UNSIGNED_INT : GLuintArray, #GL_1_1.GL_UNSIGNED_INT :", "GLOBAL_REGISTRY.__call__ returnHandler = GLOBAL_REGISTRY.get_output_handler isAccelerated = False @classmethod def getRegistry(", "voidDataPointer, _log ) ) def typedPointer( cls, value ): \"\"\"Return", ") return handler raise TypeError( \"\"\"No array-type handler for type", "= None self.output_handler = handler GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY", "convert to a ctypes pointer\"\"\" return cls.getHandler(value).from_param( value, cls.typeConstant )", "baseType = _types.GLclampd typeConstant = _types.GL_DOUBLE class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf", "handler for given value\"\"\" try: typ = value.__class__ except AttributeError", ") dimensions = classmethod( logs.logOnFail( dimensions, _log ) ) def", "): plugin = plugins.FormatHandler.by_name( name ) if plugin: try: return", ") def unitSize( cls, value, typeCode=None ): \"\"\"Determine unit size", "return plugin.load() except ImportError as err: return None else: raise", "GLuint types\"\"\" baseType = _types.GLint64 typeConstant = None # TODO:", "array representation\"\"\" return cls.getHandler(value).asArray( value, typeCode or cls.typeConstant ) asArray", "classmethod( typedPointer ) def asArray( cls, value, typeCode=None ): \"\"\"Given", "def zeros( cls, dims, typeCode=None ): \"\"\"Allocate a return array", "if not self.output_handler: for preferred in self.GENERIC_OUTPUT_PREFERENCES: self.output_handler = self.handler_by_plugin_name(", "_types.GLuint ) GLint64Array = ADT( None, _types.GLint64 ) GLuint64Array =", 
"_types.GLclampf ) GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble ) GLfloatArray =", "_types.GL_DOUBLE class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )): \"\"\"Array datatype for GLbyte", "class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )): \"\"\"Array datatype for GLuint types\"\"\"", "type of the corresponding pointer Note: this is not currently", "unit size of an array (if possible) Uses our local", "handler at all (not even ctypes/numpy ones!)\"\"\" ) return self.output_handler", "\"\"\"Allocate a return array of the given dimensions filled with", "= type(value) handler = self.get( typ ) if not handler:", "any output handler at all (not even ctypes/numpy ones!)\"\"\" )", "type, return long for pointer\"\"\" try: return cls.getHandler(value).dataPointer( value )", "this is arraySize() * atomic-unit-size \"\"\" return cls.getHandler(value).arrayByteCount( value )", "cls.getHandler(value).dataPointer( value ) except Exception as err: _log.warn( \"\"\"Failure in", "cls.getHandler(value).dimensions( value ) dimensions = classmethod( logs.logOnFail( dimensions, _log )", "= None handler = GLOBAL_REGISTRY getHandler = GLOBAL_REGISTRY.__call__ returnHandler =", "types\"\"\" baseType = _types.GLint64 typeConstant = None # TODO: find", "ArrayDatatype, ctypes.POINTER(_types.GLdouble )): \"\"\"Array datatype for GLdouble types\"\"\" baseType =", "\"array\" type, which means that it is eligible for handling", "OpenGL.arrays import formathandler, _arrayconstants as GL_1_1 from OpenGL import logs", "= None else: self.preferredOutput = None self.output_handler = handler GLOBAL_REGISTRY", "ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum ) GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei )", "class GLcharArray( ArrayDatatype, ctypes.c_char_p): \"\"\"Array datatype for ARB extension pointers-to-arrays\"\"\"", "ADT( GL_1_1.GL_SHORT, _types.GLshort ) GLintArray = ADT( GL_1_1.GL_INT, _types.GLint )", ") from_param = classmethod( logs.logOnFail( 
from_param, _log ) ) def", "typeCode or cls.typeConstant ) asArray = classmethod( logs.logOnFail( asArray, _log", "logs.logOnFail( dimensions, _log ) ) def arrayByteCount( cls, value ):", "GLcharArray( ArrayDatatype, ctypes.c_char_p): \"\"\"Array datatype for ARB extension pointers-to-arrays\"\"\" baseType", "(number-of-units)\"\"\" return cls.getHandler(value).arraySize( value, typeCode or cls.typeConstant ) arraySize =", "import ArrayDatatype as ADT except ImportError as err: _log.warn( \"Unable", "an array (if possible) Uses our local type if defined,", "= ADT( GL_1_1.GL_INT, _types.GLint ) GLubyteArray = GLbooleanArray = ADT(", "data-types this is arraySize() * atomic-unit-size \"\"\" return cls.getHandler(value).arrayByteCount( value", "GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )): \"\"\"Array datatype for GLuint types\"\"\" baseType", "__call__( self, value ): \"\"\"Lookup of handler for given value\"\"\"", "plugin.load() except ImportError as err: return None else: raise RuntimeError(", "registered\"\"\"%( typ.__module__, type.__name__, repr(value)[:50] ) ) return handler def handler_by_plugin_name(", "OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT except ImportError as err: _log.warn(", "types=None ): \"\"\"Register this class as handler for given set", ") if not self.output_handler: for preferred in self.GENERIC_OUTPUT_PREFERENCES: self.output_handler =", "= _types.GL_DOUBLE class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )): \"\"\"Array datatype for", "GLcharARBArray = GLcharArray class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )): \"\"\"Array datatype", ") GLint64Array = ADT( None, _types.GLint64 ) GLuint64Array = ADT(", "= _types.GLenum typeConstant = _types.GL_UNSIGNED_INT class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )):", "_types.GLuint64 ) GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum ) GLsizeiArray =", "data-value, get the dimensions (assumes full structure 
info)\"\"\" return cls.getHandler(value).dimensions(", "except ImportError as err: _log.warn( \"Unable to load ArrayDatatype accelerator", "GLclampdArray, GL_1_1.GL_FLOAT : GLclampfArray, GL_1_1.GL_FLOAT : GLfloatArray, GL_1_1.GL_DOUBLE : GLdoubleArray,", "ADT( GL_1_1.GL_INT, _types.GLsizei ) GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp )", ")): \"\"\"Array datatype for GLenum types\"\"\" baseType = _types.GLenum typeConstant", "full structure info)\"\"\" return cls.getHandler(value).dimensions( value ) dimensions = classmethod(", "_types.GL_BYTE class GLcharArray( ArrayDatatype, ctypes.c_char_p): \"\"\"Array datatype for ARB extension", "= _types.GL_BYTE class GLcharArray( ArrayDatatype, ctypes.c_char_p): \"\"\"Array datatype for ARB", "_types.GLchar typeConstant = _types.GL_BYTE GLcharARBArray = GLcharArray class GLshortArray( ArrayDatatype,", "_log ) ) # the final array data-type classes... class", "data-value, try to determine number of bytes it's final form", "self.output_handler = self.handler_by_plugin_name( self.preferredOutput ) if not self.output_handler: for preferred", "void_p for pointer\"\"\" pointer = cls.dataPointer( value ) try: return", "typeConstant = _types.GL_BYTE class GLcharArray( ArrayDatatype, ctypes.c_char_p): \"\"\"Array datatype for", "= self.match( base ) if handler: handler = handler.load() if", ") ) def dimensions( cls, value ): \"\"\"Given a data-value,", "except ImportError as err: return None else: raise RuntimeError( 'No", "= classmethod( logs.logOnFail( from_param, _log ) ) def dataPointer( cls,", "raise dataPointer = classmethod( logs.logOnFail( dataPointer, _log ) ) def", ") return handler def handler_by_plugin_name( self, name ): plugin =", "\"\"\"Array datatype for GLbyte types\"\"\" baseType = _types.GLbyte typeConstant =", "_types.GLbyte typeConstant = _types.GL_BYTE class GLcharArray( ArrayDatatype, ctypes.c_char_p): \"\"\"Array datatype", "None else: self.preferredOutput = None self.output_handler = handler 
GLOBAL_REGISTRY =", "the array (number-of-units)\"\"\" return cls.getHandler(value).arraySize( value, typeCode or cls.typeConstant )", "GL_1_1.GL_INT, _types.GLsizei ) GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE", "\"\"\"Given value in a known data-pointer type, return void_p for", "\"\"\"Unable to find any output handler at all (not even", "for GLfloat types\"\"\" baseType = _types.GLfloat typeConstant = _types.GL_FLOAT class", "handler = handler() if handler: self[ typ ] = handler", "= _types.GL_UNSIGNED_INT class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )): \"\"\"Array datatype for", "(str,unicode)): self.preferredOutput = handler self.output_handler = None else: self.preferredOutput =", "getRegistry( cls ): \"\"\"Get our handler registry\"\"\" return cls.handler def", "\"\"\"Array data-type implementations (abstraction points for GL array types\"\"\" import", "a data-value, try to determine number of bytes it's final", "datatype for GLbyte types\"\"\" baseType = _types.GLbyte typeConstant = _types.GL_BYTE", "if not self.output_handler: raise RuntimeError( \"\"\"Unable to find any output", "ArrayDatatype, ctypes.POINTER(_types.GLshort )): \"\"\"Array datatype for GLshort types\"\"\" baseType =", "_types.GLfloat ) GLbyteArray = ADT( GL_1_1.GL_BYTE, _types.GLbyte ) GLcharArray =", "ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 ) GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum )", "return-type handler\"\"\" if isinstance( handler, (str,unicode)): self.preferredOutput = handler self.output_handler", "value ) arrayByteCount = classmethod( logs.logOnFail( arrayByteCount, _log ) )", "for given value\"\"\" try: typ = value.__class__ except AttributeError as", "None, _types.GLint64 ) GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 ) GLenumArray", "guess... 
\"\"\" return cls.getHandler(value).unitSize( value, typeCode or cls.typeConstant ) unitSize", "typeConstant = _types.GL_UNSIGNED_INT class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )): \"\"\"Array datatype", "_types.GL_SHORT class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint )): \"\"\"Array datatype for GLint", "= _types.GL_INT class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )): \"\"\"Array datatype for", "_types.GLenum typeConstant = _types.GL_UNSIGNED_INT class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )): \"\"\"Array", "types\"\"\" baseType = _types.GLuint64 typeConstant = _types.GL_UNSIGNED_INT64 class GLenumArray( ArrayDatatype,", "unitSize = classmethod( logs.logOnFail( unitSize, _log ) ) def zeros(", "else: raise RuntimeError( 'No handler of name %s found'%(name,)) def", "%s found'%(name,)) def get_output_handler( self ): \"\"\"Fast-path lookup for output", "to guess... \"\"\" return cls.getHandler(value).unitSize( value, typeCode or cls.typeConstant )", "None self.preferredOutput = None self.all_output_handlers = [] def __call__( self,", "if not isinstance( types, (list,tuple)): types = [ types ]", "baseType = _types.GLint typeConstant = _types.GL_INT class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte", "ARB extension pointers-to-arrays\"\"\" baseType = _types.GLchar typeConstant = _types.GL_BYTE GLcharARBArray", "arrayToGLType, _log ) ) def arraySize( cls, value, typeCode =", "mark a particular argument as having an \"array\" type, which", "class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )): \"\"\"Array datatype for GLenum types\"\"\"", "handler = GLOBAL_REGISTRY getHandler = GLOBAL_REGISTRY.__call__ returnHandler = GLOBAL_REGISTRY.get_output_handler isAccelerated", "value ): \"\"\"Return a pointer-to-base-type pointer for given value\"\"\" return", ") ArrayDatatype = ADT( None, None ) GLclampdArray = ADT(", "info)\"\"\" return cls.getHandler(value).dimensions( value 
) dimensions = classmethod( logs.logOnFail( dimensions,", ") GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble ) GLfloatArray = ADT(", "type.__name__, repr(value)[:50] ) ) return handler def handler_by_plugin_name( self, name", "cls, value, typeCode = None ): \"\"\"Given a data-value, calculate", "class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )): \"\"\"Array datatype for GLfloat types\"\"\"", "ArrayDatatype = ADT( None, None ) GLclampdArray = ADT( GL_1_1.GL_DOUBLE,", "plugin: try: return plugin.load() except ImportError as err: return None", "plugin_match self.output_handler = None self.preferredOutput = None self.all_output_handlers = []", "types\"\"\" baseType = _types.GLenum typeConstant = _types.GL_UNSIGNED_INT class GLsizeiArray( ArrayDatatype,", "as GL_1_1 from OpenGL import logs _log = logs.getLog( 'OpenGL.arrays.arraydatatype'", "typ ] = handler if hasattr( handler, 'registerEquivalent' ): handler.registerEquivalent(", "datatype for GLdouble types\"\"\" baseType = _types.GLdouble typeConstant = _types.GL_DOUBLE", "or cls.typeConstant ) asArray = classmethod( logs.logOnFail( asArray, _log )", "cls.typeConstant ) arraySize = classmethod( logs.logOnFail( arraySize, _log ) )", ") def dataPointer( cls, value ): \"\"\"Given a value in", "GLfloatArray, GL_1_1.GL_DOUBLE : GLdoubleArray, GL_1_1.GL_BYTE : GLbyteArray, GL_1_1.GL_SHORT : GLshortArray,", "\"\"\"Array datatype for GLenum types\"\"\" baseType = _types.GLenum typeConstant =", "points for GL array types\"\"\" import ctypes import OpenGL from", "None ) GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd ) GLclampfArray =", "for GLdouble types\"\"\" baseType = _types.GLdouble typeConstant = _types.GL_DOUBLE class", ")): \"\"\"Array datatype for GLshort types\"\"\" baseType = _types.GLshort typeConstant", "= ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort ) GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint" ]
[ "`py_test`. shard_count: As with `py_test`. \"\"\" name_tag = \"_{}\".format(name) tags", "in remove_deps)] resolved_omit_deps_numpy = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps", "has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if PyCcLinkParamsProvider in dep: # DisableOnExport #", "\"numpy\" not in disabled_substrates: numpy_srcs = _substrate_srcs(srcs, \"numpy\") native.genrule( name", "2.0 (the \"License\"); # you may not use this file", "srcs = srcs, outs = numpy_srcs, cmd = \"$(location {})", "if \"jax\" not in disabled_substrates: jax_srcs = _substrate_srcs(srcs, \"jax\") native.genrule(", "resolved_omit_deps_numpy = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps + numpy_omit_deps", "= [], tags = [], numpy_tags = [], jax_tags =", "dep if \"tensorflow_probability/\" in dep or dep.startswith(\":\"): if \"internal/backend\" in", "timeout, shard_count = shard_count, ) native.test_suite( name = name, tags", "ctx.attr.deps: if PyInfo in dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources)", "for TF/NumPy/JAX-variadic libraries & tests.\"\"\" # [internal] load python3.bzl NO_REWRITE_NEEDED", "substrate will have a dedicated `py2and3_test` suffixed with '.tf', '.numpy',", "As with `py_test`. jax_size: A size override for the JAX", "`f'_{name}'` tag is used to produce the `test_suite`. 
numpy_tags: Tags", "depset(transitive = imports), has_py2_only_sources = any(has_py2_only_sources), has_py3_only_sources = any(has_py3_only_sources), )", "srcs, outs = numpy_srcs, cmd = \"$(location {}) $(SRCS) >", "if \"internal/backend\" in dep: return dep if \":\" in dep:", "imports = depset(transitive = imports), has_py2_only_sources = any(has_py2_only_sources), has_py3_only_sources =", "jax_srcs = _substrate_srcs(srcs, \"jax\") for src in srcs: native.genrule( name", "python_version = \"PY3\", timeout = timeout, shard_count = shard_count, )", "= srcs, outs = jax_srcs, cmd = \"$(location {}) $(SRCS)", "This rule will aggregate and pass along deps while adding", "[] for dep in ctx.attr.deps: if PyInfo in dep: transitive_sources.append(dep[PyInfo].transitive_sources)", "\"\"\"Convert a single dep to one appropriate for the given", "JAX substrate. numpy_omit_deps: List of deps to omit for the", "with '.tf', '.numpy', or '.jax' as appropriate. size: As with", "Args: name: The TF `py_library` name. NumPy and JAX libraries", "deps: As with `py_library`. The list is rewritten to depend", "[src], outs = [_substrate_src(src, \"jax\")], cmd = \"$(location {}) $(SRCS)", "= any(has_py3_only_sources), ) py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos) py_runfiles =", "\"PY3\", timeout = timeout, shard_count = shard_count, ) native.test_suite( name", "output structures to pass along Python srcs/deps/etc. py_info = PyInfo(", "Rule analysis context. Returns: Info objects to propagate deps and", "`py_test`. timeout: As with `py_test`. shard_count: As with `py_test`. \"\"\"", "dep.startswith(\":\"): dep_to_check = \"{}{}\".format(native.package_name(), dep) for no_rewrite in NO_REWRITE_NEEDED: if", "shard_count: As with `py_test`. \"\"\" name_tag = \"_{}\".format(name) tags =", "tags, srcs_version = srcs_version, timeout = timeout, shard_count = shard_count,", "the JAX test. (e.g. `\"notap\"`). 
disabled_substrates: Iterable of substrates to", "jax_tags = [], disabled_substrates = [], srcs_version = \"PY2AND3\", timeout", "License for the specific language governing permissions and # limitations", "dep) return dep def _substrate_runfiles_symlinks_impl(ctx): \"\"\"A custom BUILD rule to", "`test_suite`. numpy_tags: Tags specific to the NumPy test. (e.g. `\"notap\"`).", "rule( implementation = _substrate_runfiles_symlinks_impl, attrs = { \"substrate\": attr.string(), \"deps\":", "to the NumPy test. (e.g. `\"notap\"`). jax_tags: Tags specific to", "{}) $(SRCS) --omit_deps={} --numpy_to_jax > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_jax), ), tools", "dep.startswith(\":\"): if \"internal/backend\" in dep: return dep if \":\" in", "[_substrate_dep(dep, substrate) for dep in deps] backend_dep = \"//tensorflow_probability/python/internal/backend/{}\".format(substrate) if", "py2and3_test and py_test comingling in BUILD files. Otherwise the OSS", "deps if (dep not in substrates_omit_deps and dep not in", "the set of symlinks to generate. transitive_sources = depset(transitive =", "+ jax_omit_deps ] jax_srcs = _substrate_srcs(srcs, \"jax\") for src in", "\"\"\"Rewrite a single src filename for the given substrate.\"\"\" return", "timeout = timeout, shard_count = shard_count, ) native.test_suite( name =", "dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if PyCcLinkParamsProvider in", "transitive_sources) runfiles_dict = {} substrate = ctx.attr.substrate file_substr = \"_{}/_generated_\".format(substrate)", "symlinks for files matching a substrate genrule file pattern, i.e.", "this applies to. 
- deps: A list of py_library labels.", "single dep to one appropriate for the given substrate.\"\"\" dep_to_check", "= jax_srcs, deps = _substrate_deps(trimmed_deps, \"jax\"), srcs_version = srcs_version, testonly", "Probability Authors. # # Licensed under the Apache License, Version", "'.numpy' and '.jax' appended. srcs: As with `py_library`. A `genrule`", "] trimmed_deps = [dep for dep in deps if (dep", "py_test. srcs_version and python_version are added (with value `\"PY3\"`) if", "= transitive_sources, uses_shared_libraries = any(uses_shared_libraries), imports = depset(transitive = imports),", "deps to omit for the NumPy substrate. testonly: As with", "jax_srcs, cmd = \"$(location {}) $(SRCS) --numpy_to_jax > $@\".format(REWRITER_TARGET), tools", "= shard_count, ) native.test_suite( name = name, tags = [name_tag],", "def _substrate_dep(dep, substrate): \"\"\"Convert a single dep to one appropriate", "native.py_library( name = \"{}.numpy.raw\".format(name), srcs = _substrate_srcs(srcs, \"numpy\"), deps =", "ctx: Rule analysis context. Returns: Info objects to propagate deps", "dep: # DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport if CcInfo in", "OF ANY KIND, either express or implied. # See the", "See the License for the specific language governing permissions and", "substrates_omit_deps + numpy_omit_deps ] for src in srcs: native.genrule( name", "main = _substrate_src(\"{}.py\".format(name), \"numpy\"), deps = _substrate_deps(deps, \"numpy\"), tags =", "to in writing, software # distributed under the License is", "of symlinks to generate. 
transitive_sources = depset(transitive = transitive_sources) runfiles_dict", "+ [\"tfp_jax\"] + jax_tags, srcs_version = srcs_version, python_version = \"PY3\",", "in ctx.attr.deps: if PyInfo in dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources)", "irreversible. def py3_test(*args, **kwargs): \"\"\"Internal/external reversibility, denotes py3-only vs py2+3", "`\"notap\"`). disabled_substrates: Iterable of substrates to disable, items from [\"numpy\",", "REWRITER_TARGET, \",\".join(resolved_omit_deps_jax), ), tools = [REWRITER_TARGET], ) native.py_library( name =", "limitations under the License. # ============================================================================ \"\"\"Build defs for TF/NumPy/JAX-variadic", "or agreed to in writing, software # distributed under the", "As with `py_test`. \"\"\" name_tag = \"_{}\".format(name) tags = [t", "py_test comingling in BUILD files. Otherwise the OSS export #", "disabled_substrates: jax_srcs = _substrate_srcs(srcs, \"jax\") native.genrule( name = \"rewrite_{}_jax\".format(name), srcs", "= testonly, ) # Add symlinks under tfp/substrates/jax. substrate_runfiles_symlinks( name", "{ \"substrate\": attr.string(), \"deps\": attr.label_list(), }, ) def multi_substrate_py_library( name,", "`py_library` for each of TF, NumPy, and JAX. Args: name:", "on substrate-specific libraries for substrate variants. tags: Tags global to", "compliance with the License. # You may obtain a copy", "Args: *args: Passed to underlying py_test. 
**kwargs: Passed to underlying", "\"\"\" name_tag = \"_{}\".format(name) tags = [t for t in", "[\"tfp_jax\"] + jax_tags, srcs_version = srcs_version, python_version = \"PY3\", timeout", "= \"$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_jax),", "gets a `'tfp_jax'` tag. A `f'_{name}'` tag is used to", "= \"{}{}\".format(native.package_name(), dep) for no_rewrite in NO_REWRITE_NEEDED: if no_rewrite in", "a single dep to one appropriate for the given substrate.\"\"\"", "not in dep: dep = \"{}:{}\".format(dep, dep.split(\"/\")[-1]) if dep.startswith(\":\"): dep", "= [], jax_omit_deps = [], numpy_omit_deps = [], testonly =", "underlying py_test. **kwargs: Passed to underlying py_test. srcs_version and python_version", "rewrite NumPy and JAX variants, writing the test file into", "for each of TF, NumPy, and JAX. Args: name: The", "[ _resolve_omit_dep(dep) for dep in substrates_omit_deps + jax_omit_deps ] jax_srcs", "\"$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_jax), ),", "not use this file except in compliance with the License.", "[REWRITER_TARGET], ) py3_test( name = \"{}.numpy\".format(name), size = numpy_size or", "new_deps = [_substrate_dep(dep, substrate) for dep in deps] backend_dep =", "The list is rewritten to depend on substrate-specific libraries for", "= \"{}:{}\".format(dep, dep.split(\"/\")[-1]) if dep.startswith(\":\"): dep = \"{}{}\".format(native.package_name(), dep) return", "= deps, srcs_version = srcs_version, testonly = testonly, ) remove_deps", "omit for the JAX substrate. numpy_omit_deps: List of deps to", "with `py_test`. 
The list is rewritten to depend on substrate-specific", "you may not use this file except in compliance with", "def _substrate_deps(deps, substrate): \"\"\"Convert deps to those appropriate for the", "numpy_omit_deps = [], testonly = 0, srcs_version = \"PY2AND3\"): \"\"\"A", "srcs_version = srcs_version, testonly = testonly, ) # Add symlinks", "py3-only vs py2+3 tests. Args: *args: Passed to underlying py_test.", "for t in tags] tags.append(name_tag) tags.append(\"multi_substrate\") native.py_test( name = \"{}.tf\".format(name),", "= testonly, ) def multi_substrate_py_test( name, size = \"small\", jax_size", "[dep for dep in deps if (dep not in substrates_omit_deps", "= \"PY3\", timeout = timeout, shard_count = shard_count, ) native.test_suite(", "test. (e.g. `\"notap\"`). disabled_substrates: Iterable of substrates to disable, items", "testonly = testonly, ) remove_deps = [ \"//third_party/py/tensorflow\", \"//third_party/py/tensorflow:tensorflow\", ]", "\"_{}/_generated_{}\".format(substrate, src) def _substrate_srcs(srcs, substrate): \"\"\"Rewrite src filenames for the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "= f # Construct the output structures to pass along", "\"PY3\" if \"python_version\" not in kwargs: kwargs[\"python_version\"] = \"PY3\" native.py_test(*args,", "py3_test( name = \"{}.jax\".format(name), size = jax_size or size, srcs", "the given substrate.\"\"\" return \"_{}/_generated_{}\".format(substrate, src) def _substrate_srcs(srcs, substrate): \"\"\"Rewrite", "= _substrate_srcs(srcs, \"numpy\") native.genrule( name = \"rewrite_{}_numpy\".format(name), srcs = srcs,", "return new_deps # This is needed for the transitional period", "comingling in BUILD files. 
Otherwise the OSS export # rewrite", "srcs_version = \"PY2AND3\", timeout = None, shard_count = None): \"\"\"A", "testonly, ) resolved_omit_deps_jax = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps", "target.\"\"\" if \":\" not in dep: dep = \"{}:{}\".format(dep, dep.split(\"/\")[-1])", "implementation = _substrate_runfiles_symlinks_impl, attrs = { \"substrate\": attr.string(), \"deps\": attr.label_list(),", "jax_srcs, main = _substrate_src(\"{}.py\".format(name), \"jax\"), deps = jax_deps, tags =", "internal # py2and3_test and py_test comingling in BUILD files. Otherwise", "= { \"substrate\": attr.string(), \"deps\": attr.label_list(), }, ) def multi_substrate_py_library(", "python3.bzl NO_REWRITE_NEEDED = [ \"internal:all_util\", \"internal:docstring_util\", \"internal:reparameterization\", \"layers\", \"platform_google\", ]", "\"{}.py\".format(name), deps = deps, tags = tags, srcs_version = srcs_version,", "used to produce the `test_suite`. numpy_tags: Tags specific to the", "tag is used to produce the `test_suite`. numpy_tags: Tags specific", "libraries have '.numpy' and '.jax' appended. srcs: As with `py_library`.", "target. NumPy also gets a `'tfp_numpy'` tag, and JAX gets", "numpy_tags: Tags specific to the NumPy test. (e.g. `\"notap\"`). jax_tags:", "depset inputs to resolve transitive dependencies. transitive_sources = [] uses_shared_libraries", "= numpy_srcs, cmd = \"$(location {}) $(SRCS) > $@\".format(REWRITER_TARGET), tools", "Info objects to propagate deps and add runfiles symlinks. \"\"\"", ") def multi_substrate_py_library( name, srcs = [], deps = [],", "labels. These are passed along. Args: ctx: Rule analysis context.", "symlinks under tfp/substrates/jax. substrate_runfiles_symlinks( name = \"{}.jax\".format(name), substrate = \"jax\",", "dep: return dep if \":\" in dep: return \"{}.{}\".format(dep, substrate)", "substrate-specific libraries for substrate variants. 
tags: Tags global to this", "permissions and # limitations under the License. # ============================================================================ \"\"\"Build", "= [_substrate_src(src, \"numpy\")], cmd = \"$(location {}) $(SRCS) --omit_deps={} >", "NumPy and JAX variants, writing the test file into a", "defs for TF/NumPy/JAX-variadic libraries & tests.\"\"\" # [internal] load python3.bzl", "to those appropriate for the given substrate.\"\"\" new_deps = [_substrate_dep(dep,", "> $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_numpy), ), tools = [REWRITER_TARGET], ) native.py_library(", "not specified. \"\"\" kwargs = dict(kwargs) if \"srcs_version\" not in", "runfiles symlinks. \"\"\" # Aggregate the depset inputs to resolve", "srcs_version = srcs_version, timeout = timeout, shard_count = shard_count, )", "[internal] load python3.bzl NO_REWRITE_NEEDED = [ \"internal:all_util\", \"internal:docstring_util\", \"internal:reparameterization\", \"layers\",", "deps = _substrate_deps(deps, \"numpy\"), tags = tags + [\"tfp_numpy\"] +", "timeout = None, shard_count = None): \"\"\"A TFP `py2and3_test` for", "native.genrule( name = \"rewrite_{}_jax\".format(src.replace(\".\", \"_\")), srcs = [src], outs =", "deps to omit if those libraries are not rewritten for", "= dict(kwargs) if \"srcs_version\" not in kwargs: kwargs[\"srcs_version\"] = \"PY3\"", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "List of deps to omit for the NumPy substrate. testonly:", "tags + [\"tfp_jax\"] + jax_tags, srcs_version = srcs_version, python_version =", "These are passed along. Args: ctx: Rule analysis context. 
Returns:", "a `substrates_omit_deps` item to full target.\"\"\" if \":\" not in", "have a `genrule` emitted to rewrite NumPy and JAX variants,", "\"\"\" kwargs = dict(kwargs) if \"srcs_version\" not in kwargs: kwargs[\"srcs_version\"]", "in kwargs: kwargs[\"srcs_version\"] = \"PY3\" if \"python_version\" not in kwargs:", "timeout, shard_count = shard_count, ) if \"numpy\" not in disabled_substrates:", "for the NumPy substrate. testonly: As with `py_library`. srcs_version: As", "NumPy and JAX libraries have '.numpy' and '.jax' appended. srcs:", "and # limitations under the License. # ============================================================================ \"\"\"Build defs", "attr.label_list(), }, ) def multi_substrate_py_library( name, srcs = [], deps", "file except in compliance with the License. # You may", "srcs = _substrate_srcs(srcs, \"numpy\"), deps = _substrate_deps(trimmed_deps, \"numpy\"), srcs_version =", "src filenames for the given substrate.\"\"\" return [_substrate_src(src, substrate) for", "= [REWRITER_TARGET], ) native.py_library( name = \"{}.jax.raw\".format(name), srcs = jax_srcs,", "not rewritten for the substrates. jax_omit_deps: List of deps to", "}, ) def multi_substrate_py_library( name, srcs = [], deps =", "\"_{}\".format(name) tags = [t for t in tags] tags.append(name_tag) tags.append(\"multi_substrate\")", "NumPy, and JAX. Args: name: Name of the `test_suite` which", "# Aggregate the depset inputs to resolve transitive dependencies. transitive_sources", "objects to propagate deps and add runfiles symlinks. \"\"\" #", "name: The TF `py_library` name. NumPy and JAX libraries have", "srcs_version: As with `py_library`. \"\"\" native.py_library( name = name, srcs", "for the substrates. 
jax_omit_deps: List of deps to omit for", "`substrates_omit_deps` item to full target.\"\"\" if \":\" not in dep:", "if \"tensorflow_probability/\" in dep or dep.startswith(\":\"): if \"internal/backend\" in dep:", "tools = [REWRITER_TARGET], ) jax_deps = _substrate_deps(deps, \"jax\") # [internal]", "py_info = PyInfo( transitive_sources = transitive_sources, uses_shared_libraries = any(uses_shared_libraries), imports", "jax_tags: Tags specific to the JAX test. (e.g. `\"notap\"`). disabled_substrates:", "substrate_runfiles_symlinks = rule( implementation = _substrate_runfiles_symlinks_impl, attrs = { \"substrate\":", "substrate variants. tags: Tags global to this test target. NumPy", "substrate.\"\"\" dep_to_check = dep if dep.startswith(\":\"): dep_to_check = \"{}{}\".format(native.package_name(), dep)", "= [ \"internal:all_util\", \"internal:docstring_util\", \"internal:reparameterization\", \"layers\", \"platform_google\", ] REWRITER_TARGET =", "f # Construct the output structures to pass along Python", "srcs for NumPy and JAX substrates. deps: As with `py_library`.", "which we have the internal # py2and3_test and py_test comingling", "KIND, either express or implied. # See the License for", "src filename for the given substrate.\"\"\" return \"_{}/_generated_{}\".format(substrate, src) def", "Otherwise the OSS export # rewrite process becomes irreversible. def", "main = \"{}.py\".format(name), deps = deps, tags = tags, srcs_version", "Copyright 2019 The TensorFlow Probability Authors. # # Licensed under", "rule will aggregate and pass along deps while adding the", "\"jax\") native.genrule( name = \"rewrite_{}_jax\".format(name), srcs = srcs, outs =", "the test. Each substrate will have a dedicated `py2and3_test` suffixed", "_substrate_srcs(srcs, substrate): \"\"\"Rewrite src filenames for the given substrate.\"\"\" return", "a `'tfp_numpy'` tag, and JAX gets a `'tfp_jax'` tag. A", "test. 
Each substrate will have a dedicated `py2and3_test` suffixed with", "substrate.\"\"\" return \"_{}/_generated_{}\".format(substrate, src) def _substrate_srcs(srcs, substrate): \"\"\"Rewrite src filenames", "= \"{}.tf\".format(name), size = size, srcs = srcs, main =", "deps, tags = tags, srcs_version = srcs_version, timeout = timeout,", "(the \"License\"); # you may not use this file except", "full target.\"\"\" if \":\" not in dep: dep = \"{}:{}\".format(dep,", "context. Returns: Info objects to propagate deps and add runfiles", "with `py_test`. \"\"\" name_tag = \"_{}\".format(name) tags = [t for", "also gets a `'tfp_numpy'` tag, and JAX gets a `'tfp_jax'`", "= depset(transitive = imports), has_py2_only_sources = any(has_py2_only_sources), has_py3_only_sources = any(has_py3_only_sources),", "list of py_library labels. These are passed along. Args: ctx:", "attr.string(), \"deps\": attr.label_list(), }, ) def multi_substrate_py_library( name, srcs =", "shard_count = shard_count, ) if \"jax\" not in disabled_substrates: jax_srcs", "with `py_test`. These will have a `genrule` emitted to rewrite", "# # Unless required by applicable law or agreed to", "to this test target. NumPy also gets a `'tfp_numpy'` tag,", "\"\"\"Build defs for TF/NumPy/JAX-variadic libraries & tests.\"\"\" # [internal] load", "we have the internal # py2and3_test and py_test comingling in", "timeout: As with `py_test`. shard_count: As with `py_test`. \"\"\" name_tag", "name = \"{}.numpy\".format(name), size = numpy_size or size, srcs =", "= \"rewrite_{}_jax\".format(name), srcs = srcs, outs = jax_srcs, cmd =", "propagate deps and add runfiles symlinks. \"\"\" # Aggregate the", "attributes: - substrate: One of 'jax' or 'numpy'; which substrate", "\"{}.numpy\".format(name), substrate = \"numpy\", deps = [\":{}.numpy.raw\".format(name)], testonly = testonly,", "with `py_test`. shard_count: As with `py_test`. 
\"\"\" name_tag = \"_{}\".format(name)", "{}) $(SRCS) --omit_deps={} > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_numpy), ), tools =", "Tags specific to the NumPy test. (e.g. `\"notap\"`). jax_tags: Tags", "Each substrate will have a dedicated `py2and3_test` suffixed with '.tf',", "\"{}.{}\".format(dep, substrate) return \"{}:{}.{}\".format(dep, dep.split(\"/\")[-1], substrate) return dep def _substrate_deps(deps,", "srcs] def _substrate_dep(dep, substrate): \"\"\"Convert a single dep to one", "tag. A `f'_{name}'` tag is used to produce the `test_suite`.", "`'tfp_numpy'` tag, and JAX gets a `'tfp_jax'` tag. A `f'_{name}'`", "implied. # See the License for the specific language governing", "= transitive_sources) runfiles_dict = {} substrate = ctx.attr.substrate file_substr =", "`py_test`. jax_size: A size override for the JAX target. numpy_size:", "testonly, ) # Add symlinks under tfp/substrates/numpy. substrate_runfiles_symlinks( name =", "`\"notap\"`). jax_tags: Tags specific to the JAX test. (e.g. `\"notap\"`).", "genrule file pattern, i.e. `'_jax/_generated_normal.py'`. This rule will aggregate and", "python_version are added (with value `\"PY3\"`) if not specified. \"\"\"", "or size, srcs = numpy_srcs, main = _substrate_src(\"{}.py\".format(name), \"numpy\"), deps", "tags = tags, srcs_version = srcs_version, timeout = timeout, shard_count", "JAX build dep py3_test( name = \"{}.jax\".format(name), size = jax_size", "size override for the numpy target. srcs: As with `py_test`.", "timeout, shard_count = shard_count, ) if \"jax\" not in disabled_substrates:", "a `'tfp_jax'` tag. A `f'_{name}'` tag is used to produce", "\"jax\"), srcs_version = srcs_version, testonly = testonly, ) # Add", "in f.short_path: pre, post = f.short_path.split(\"/python/\") out_path = \"{}/substrates/{}/{}\".format( pre,", "TFP `py_library` for each of TF, NumPy, and JAX. 
Args:", "srcs_version, timeout = timeout, shard_count = shard_count, ) if \"numpy\"", "testonly, ) remove_deps = [ \"//third_party/py/tensorflow\", \"//third_party/py/tensorflow:tensorflow\", ] trimmed_deps =", "`genrule` emitted to rewrite NumPy and JAX variants, writing the", "= [\":{}.numpy.raw\".format(name)], testonly = testonly, ) resolved_omit_deps_jax = [ _resolve_omit_dep(dep)", "specified. \"\"\" kwargs = dict(kwargs) if \"srcs_version\" not in kwargs:", "rule which adds runfiles symlinks for files matching a substrate", "def _resolve_omit_dep(dep): \"\"\"Resolves a `substrates_omit_deps` item to full target.\"\"\" if", "dedicated `py2and3_test` suffixed with '.tf', '.numpy', or '.jax' as appropriate.", "adding the given symlinks to the runfiles structure. Build rule", "return dep if \":\" in dep: return \"{}.{}\".format(dep, substrate) return", "the JAX target. numpy_size: A size override for the numpy", "$@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_jax), ), tools = [REWRITER_TARGET], ) native.py_library( name", "if PyInfo in dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) #", "test target. NumPy also gets a `'tfp_numpy'` tag, and JAX", ") resolved_omit_deps_jax = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps +", "Unless required by applicable law or agreed to in writing,", "name_tag = \"_{}\".format(name) tags = [t for t in tags]", "substrates to disable, items from [\"numpy\", \"jax\"]. 
srcs_version: As with", "[], tags = [], numpy_tags = [], jax_tags = [],", "transitive_sources = [] uses_shared_libraries = [] imports = [] has_py2_only_sources", "= \"//tensorflow_probability/python/internal/backend/{}\".format(substrate) if backend_dep not in new_deps: new_deps.append(backend_dep) return new_deps", "transitive_files = py_runfiles, root_symlinks = runfiles_dict, )) return py_info, py_cc_link_info,", "jax_size: A size override for the JAX target. numpy_size: A", "the specific language governing permissions and # limitations under the", "dep to one appropriate for the given substrate.\"\"\" dep_to_check =", "`py_library`. A `genrule` is used to rewrite srcs for NumPy", "Aggregate the depset inputs to resolve transitive dependencies. transitive_sources =", "TF, NumPy, and JAX. Args: name: The TF `py_library` name.", "= any(uses_shared_libraries), imports = depset(transitive = imports), has_py2_only_sources = any(has_py2_only_sources),", "if PyCcLinkParamsProvider in dep: # DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport", "no_rewrite in NO_REWRITE_NEEDED: if no_rewrite in dep_to_check: return dep if", "_substrate_runfiles_symlinks_impl(ctx): \"\"\"A custom BUILD rule to generate python runfiles symlinks.", "\"numpy\")], cmd = \"$(location {}) $(SRCS) --omit_deps={} > $@\".format( REWRITER_TARGET,", "TF `py_library` name. NumPy and JAX libraries have '.numpy' and", "_resolve_omit_dep(dep): \"\"\"Resolves a `substrates_omit_deps` item to full target.\"\"\" if \":\"", "the given substrate.\"\"\" new_deps = [_substrate_dep(dep, substrate) for dep in", "= depset(transitive = transitive_sources) runfiles_dict = {} substrate = ctx.attr.substrate", "\"{}:{}.{}\".format(dep, dep.split(\"/\")[-1], substrate) return dep def _substrate_deps(deps, substrate): \"\"\"Convert deps", "Args: ctx: Rule analysis context. 
Returns: Info objects to propagate", "for the given substrate.\"\"\" return \"_{}/_generated_{}\".format(substrate, src) def _substrate_srcs(srcs, substrate):", "\"\"\"Internal/external reversibility, denotes py3-only vs py2+3 tests. Args: *args: Passed", "def py3_test(*args, **kwargs): \"\"\"Internal/external reversibility, denotes py3-only vs py2+3 tests.", "are not rewritten for the substrates. jax_omit_deps: List of deps", "\"PY3\" native.py_test(*args, **kwargs) def _resolve_omit_dep(dep): \"\"\"Resolves a `substrates_omit_deps` item to", "dep: cc_infos.append(dep[CcInfo]) # Determine the set of symlinks to generate.", "= [], disabled_substrates = [], srcs_version = \"PY2AND3\", timeout =", "size: As with `py_test`. jax_size: A size override for the", "in new_deps: new_deps.append(backend_dep) return new_deps # This is needed for", "[], deps = [], tags = [], numpy_tags = [],", "disabled_substrates: numpy_srcs = _substrate_srcs(srcs, \"numpy\") native.genrule( name = \"rewrite_{}_numpy\".format(name), srcs", "srcs = [src], outs = [_substrate_src(src, \"numpy\")], cmd = \"$(location", "and JAX libraries have '.numpy' and '.jax' appended. srcs: As", "if no_rewrite in dep_to_check: return dep if \"tensorflow_probability/\" in dep", "testonly = testonly, ) resolved_omit_deps_jax = [ _resolve_omit_dep(dep) for dep", "for src in srcs: native.genrule( name = \"rewrite_{}_jax\".format(src.replace(\".\", \"_\")), srcs", "[ \"//third_party/py/tensorflow\", \"//third_party/py/tensorflow:tensorflow\", ] trimmed_deps = [dep for dep in", "As with `py_test`. shard_count: As with `py_test`. \"\"\" name_tag =", "$(SRCS) --omit_deps={} --numpy_to_jax > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_jax), ), tools =", "override for the JAX target. 
numpy_size: A size override for", "substrates_omit_deps + jax_omit_deps ] jax_srcs = _substrate_srcs(srcs, \"jax\") for src", ") py3_test( name = \"{}.numpy\".format(name), size = numpy_size or size,", "return dep def _substrate_runfiles_symlinks_impl(ctx): \"\"\"A custom BUILD rule to generate", "will aggregate and pass along deps while adding the given", "srcs, deps = deps, srcs_version = srcs_version, testonly = testonly,", "[internal] Add JAX build dep py3_test( name = \"{}.jax\".format(name), size", "if \":\" in dep: return \"{}.{}\".format(dep, substrate) return \"{}:{}.{}\".format(dep, dep.split(\"/\")[-1],", "NO_REWRITE_NEEDED = [ \"internal:all_util\", \"internal:docstring_util\", \"internal:reparameterization\", \"layers\", \"platform_google\", ] REWRITER_TARGET", "BUILD rule to generate python runfiles symlinks. A custom build", "tags: Tags global to this test target. NumPy also gets", "trimmed_deps = [dep for dep in deps if (dep not", "timeout = timeout, shard_count = shard_count, ) if \"numpy\" not", "JAX. Args: name: Name of the `test_suite` which covers TF,", "The TensorFlow Probability Authors. # # Licensed under the Apache", "Args: name: Name of the `test_suite` which covers TF, NumPy", "matching a substrate genrule file pattern, i.e. `'_jax/_generated_normal.py'`. This rule", "rewritten to depend on substrate-specific libraries for substrate variants. substrates_omit_deps:", "[], testonly = 0, srcs_version = \"PY2AND3\"): \"\"\"A TFP `py_library`", "= testonly, ) remove_deps = [ \"//third_party/py/tensorflow\", \"//third_party/py/tensorflow:tensorflow\", ] trimmed_deps", "= [REWRITER_TARGET], ) jax_deps = _substrate_deps(deps, \"jax\") # [internal] Add", "= tags + [\"tfp_jax\"] + jax_tags, srcs_version = srcs_version, python_version", "f.short_path: pre, post = f.short_path.split(\"/python/\") out_path = \"{}/substrates/{}/{}\".format( pre, substrate,", "are passed along. Args: ctx: Rule analysis context. 
Returns: Info", "\"numpy\"), deps = _substrate_deps(trimmed_deps, \"numpy\"), srcs_version = srcs_version, testonly =", "deps = deps, srcs_version = srcs_version, testonly = testonly, )", "symlinks to the runfiles structure. Build rule attributes: - substrate:", "in f.dirname and file_substr in f.short_path: pre, post = f.short_path.split(\"/python/\")", "You may obtain a copy of the License at #", "filenames for the given substrate.\"\"\" return [_substrate_src(src, substrate) for src", "file_substr in f.short_path: pre, post = f.short_path.split(\"/python/\") out_path = \"{}/substrates/{}/{}\".format(", "deps to omit for the JAX substrate. numpy_omit_deps: List of", "] for src in srcs: native.genrule( name = \"rewrite_{}_numpy\".format(src.replace(\".\", \"_\")),", "tests. Args: *args: Passed to underlying py_test. **kwargs: Passed to", "uses_shared_libraries = any(uses_shared_libraries), imports = depset(transitive = imports), has_py2_only_sources =", "srcs_version = srcs_version, testonly = testonly, ) remove_deps = [", "runfiles structure. Build rule attributes: - substrate: One of 'jax'", "PyInfo in dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if", "As with `py_test`. These will have a `genrule` emitted to", "governing permissions and # limitations under the License. # ============================================================================", "variants. tags: Tags global to this test target. NumPy also", "A list of py_library labels. These are passed along. 
Args:", "dep = \"{}:{}\".format(dep, dep.split(\"/\")[-1]) if dep.startswith(\":\"): dep = \"{}{}\".format(native.package_name(), dep)", "name = \"rewrite_{}_jax\".format(src.replace(\".\", \"_\")), srcs = [src], outs = [_substrate_src(src,", "_resolve_omit_dep(dep) for dep in substrates_omit_deps + jax_omit_deps ] jax_srcs =", "of the test. Each substrate will have a dedicated `py2and3_test`", "substrate. numpy_omit_deps: List of deps to omit for the NumPy", "in tags] tags.append(name_tag) tags.append(\"multi_substrate\") native.py_test( name = \"{}.tf\".format(name), size =", "= [ _resolve_omit_dep(dep) for dep in substrates_omit_deps + jax_omit_deps ]", "name = \"{}.numpy\".format(name), substrate = \"numpy\", deps = [\":{}.numpy.raw\".format(name)], testonly", "`py_library`. \"\"\" native.py_library( name = name, srcs = srcs, deps", "srcs = [], deps = [], tags = [], numpy_tags", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "with `py_test`. timeout: As with `py_test`. shard_count: As with `py_test`.", "depset( transitive = [depset(transitive = [ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ]) for", "is rewritten to depend on substrate-specific libraries for substrate variants.", "shard_count, ) if \"jax\" not in disabled_substrates: jax_srcs = _substrate_srcs(srcs,", "for the given substrate.\"\"\" new_deps = [_substrate_dep(dep, substrate) for dep", "to propagate deps and add runfiles symlinks. \"\"\" # Aggregate", "omit if those libraries are not rewritten for the substrates.", "and JAX substrates. deps: As with `py_library`. 
The list is", "and file_substr in f.short_path: pre, post = f.short_path.split(\"/python/\") out_path =", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "= \"jax\", deps = [\":{}.jax.raw\".format(name)], testonly = testonly, ) def", "\"platform_google\", ] REWRITER_TARGET = \"//tensorflow_probability/substrates/meta:rewrite\" RUNFILES_ROOT = \"tensorflow_probability/\" def _substrate_src(src,", "has_py2_only_sources = [] has_py3_only_sources = [] cc_infos = [] for", "License. # You may obtain a copy of the License", "name = \"rewrite_{}_jax\".format(name), srcs = srcs, outs = jax_srcs, cmd", "tag, and JAX gets a `'tfp_jax'` tag. A `f'_{name}'` tag", "for src in srcs: native.genrule( name = \"rewrite_{}_numpy\".format(src.replace(\".\", \"_\")), srcs", "to the JAX test. (e.g. `\"notap\"`). disabled_substrates: Iterable of substrates", "= None, srcs = [], deps = [], tags =", "adds runfiles symlinks for files matching a substrate genrule file", "= [_substrate_dep(dep, substrate) for dep in deps] backend_dep = \"//tensorflow_probability/python/internal/backend/{}\".format(substrate)", "= tags, srcs_version = srcs_version, timeout = timeout, shard_count =", "\"{}:{}\".format(dep, dep.split(\"/\")[-1]) if dep.startswith(\":\"): dep = \"{}{}\".format(native.package_name(), dep) return dep", "dep def _substrate_deps(deps, substrate): \"\"\"Convert deps to those appropriate for", "= \"tensorflow_probability/\" def _substrate_src(src, substrate): \"\"\"Rewrite a single src filename", "= [] cc_infos = [] for dep in ctx.attr.deps: if", "pre, post = f.short_path.split(\"/python/\") out_path = \"{}/substrates/{}/{}\".format( pre, substrate, post.replace(file_substr,", "substrate_runfiles_symlinks( name = \"{}.jax\".format(name), substrate = \"jax\", deps = [\":{}.jax.raw\".format(name)],", "\"numpy\"), tags = tags + [\"tfp_numpy\"] + numpy_tags, srcs_version =", "License. 
# ============================================================================ \"\"\"Build defs for TF/NumPy/JAX-variadic libraries & tests.\"\"\"", "\",\".join(resolved_omit_deps_jax), ), tools = [REWRITER_TARGET], ) native.py_library( name = \"{}.jax.raw\".format(name),", "\"{}{}\".format(native.package_name(), dep) return dep def _substrate_runfiles_symlinks_impl(ctx): \"\"\"A custom BUILD rule", "(e.g. `\"notap\"`). disabled_substrates: Iterable of substrates to disable, items from", "= \"small\", jax_size = None, numpy_size = None, srcs =", "\":\" not in dep: dep = \"{}:{}\".format(dep, dep.split(\"/\")[-1]) if dep.startswith(\":\"):", "srcs_version = srcs_version, python_version = \"PY3\", timeout = timeout, shard_count", "to underlying py_test. srcs_version and python_version are added (with value", "to omit for the NumPy substrate. testonly: As with `py_library`.", "of TF, NumPy, and JAX. Args: name: Name of the", "`test_suite` which covers TF, NumPy and JAX variants of the", "= _substrate_deps(deps, \"jax\") # [internal] Add JAX build dep py3_test(", ") native.py_library( name = \"{}.numpy.raw\".format(name), srcs = _substrate_srcs(srcs, \"numpy\"), deps", "= [REWRITER_TARGET], ) native.py_library( name = \"{}.numpy.raw\".format(name), srcs = _substrate_srcs(srcs,", "file pattern, i.e. `'_jax/_generated_normal.py'`. This rule will aggregate and pass", "NumPy also gets a `'tfp_numpy'` tag, and JAX gets a", "A custom build rule which adds runfiles symlinks for files", "= [], numpy_omit_deps = [], testonly = 0, srcs_version =", "rule attributes: - substrate: One of 'jax' or 'numpy'; which", "NO_REWRITE_NEEDED: if no_rewrite in dep_to_check: return dep if \"tensorflow_probability/\" in", "with `py_library`. srcs_version: As with `py_library`. \"\"\" native.py_library( name =", "if \"srcs_version\" not in kwargs: kwargs[\"srcs_version\"] = \"PY3\" if \"python_version\"", "`py_library`. srcs_version: As with `py_library`. 
\"\"\" native.py_library( name = name,", "= \"$(location {}) $(SRCS) --omit_deps={} > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_numpy), ),", "the `test_suite`. numpy_tags: Tags specific to the NumPy test. (e.g.", "if (dep not in substrates_omit_deps and dep not in remove_deps)]", "kwargs: kwargs[\"python_version\"] = \"PY3\" native.py_test(*args, **kwargs) def _resolve_omit_dep(dep): \"\"\"Resolves a", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "= [src], outs = [_substrate_src(src, \"numpy\")], cmd = \"$(location {})", "in srcs: native.genrule( name = \"rewrite_{}_jax\".format(src.replace(\".\", \"_\")), srcs = [src],", "dict(kwargs) if \"srcs_version\" not in kwargs: kwargs[\"srcs_version\"] = \"PY3\" if", "OSS export # rewrite process becomes irreversible. def py3_test(*args, **kwargs):", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "]) for dep in ctx.attr.deps], ) runfiles = DefaultInfo(runfiles =", "\"jax\") for src in srcs: native.genrule( name = \"rewrite_{}_jax\".format(src.replace(\".\", \"_\")),", "deps, srcs_version = srcs_version, testonly = testonly, ) remove_deps =", "test. (e.g. `\"notap\"`). jax_tags: Tags specific to the JAX test.", "\"\"\"Rewrite src filenames for the given substrate.\"\"\" return [_substrate_src(src, substrate)", "_substrate_srcs(srcs, \"numpy\"), deps = _substrate_deps(trimmed_deps, \"numpy\"), srcs_version = srcs_version, testonly", "def _substrate_srcs(srcs, substrate): \"\"\"Rewrite src filenames for the given substrate.\"\"\"", "numpy_tags, srcs_version = srcs_version, python_version = \"PY3\", timeout = timeout,", "language governing permissions and # limitations under the License. 
#", "= {} substrate = ctx.attr.substrate file_substr = \"_{}/_generated_\".format(substrate) for f", "filename for the given substrate.\"\"\" return \"_{}/_generated_{}\".format(substrate, src) def _substrate_srcs(srcs,", "required by applicable law or agreed to in writing, software", "have '.numpy' and '.jax' appended. srcs: As with `py_library`. A", "= 0, srcs_version = \"PY2AND3\"): \"\"\"A TFP `py_library` for each", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "added (with value `\"PY3\"`) if not specified. \"\"\" kwargs =", "native.genrule( name = \"rewrite_{}_numpy\".format(src.replace(\".\", \"_\")), srcs = [src], outs =", "under tfp/substrates/jax. substrate_runfiles_symlinks( name = \"{}.jax\".format(name), substrate = \"jax\", deps", "src in srcs: native.genrule( name = \"rewrite_{}_numpy\".format(src.replace(\".\", \"_\")), srcs =", "or size, srcs = jax_srcs, main = _substrate_src(\"{}.py\".format(name), \"jax\"), deps", "= any(has_py2_only_sources), has_py3_only_sources = any(has_py3_only_sources), ) py_cc_link_info = cc_common.merge_cc_infos(cc_infos =", "agreed to in writing, software # distributed under the License", "item to full target.\"\"\" if \":\" not in dep: dep", "to pass along Python srcs/deps/etc. py_info = PyInfo( transitive_sources =", "distributed under the License is distributed on an \"AS IS\"", "substrate): \"\"\"Convert a single dep to one appropriate for the", "for dep in ctx.attr.deps: if PyInfo in dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries)", "one appropriate for the given substrate.\"\"\" dep_to_check = dep if", "# rewrite process becomes irreversible. 
def py3_test(*args, **kwargs): \"\"\"Internal/external reversibility,", "= srcs_version, python_version = \"PY3\", timeout = timeout, shard_count =", "\"$(location {}) $(SRCS) > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) py3_test(", ") remove_deps = [ \"//third_party/py/tensorflow\", \"//third_party/py/tensorflow:tensorflow\", ] trimmed_deps = [dep", "tests.\"\"\" # [internal] load python3.bzl NO_REWRITE_NEEDED = [ \"internal:all_util\", \"internal:docstring_util\",", "\"tensorflow_probability/\" def _substrate_src(src, substrate): \"\"\"Rewrite a single src filename for", "underlying py_test. srcs_version and python_version are added (with value `\"PY3\"`)", "= _substrate_deps(trimmed_deps, \"numpy\"), srcs_version = srcs_version, testonly = testonly, )", "aggregate and pass along deps while adding the given symlinks", "= [dep for dep in deps if (dep not in", "no_rewrite in dep_to_check: return dep if \"tensorflow_probability/\" in dep or", "dep_to_check = dep if dep.startswith(\":\"): dep_to_check = \"{}{}\".format(native.package_name(), dep) for", "= _substrate_src(\"{}.py\".format(name), \"numpy\"), deps = _substrate_deps(deps, \"numpy\"), tags = tags", "name = \"{}.jax.raw\".format(name), srcs = jax_srcs, deps = _substrate_deps(trimmed_deps, \"jax\"),", "# if PyCcLinkParamsProvider in dep: # DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) #", "= \"{}.numpy.raw\".format(name), srcs = _substrate_srcs(srcs, \"numpy\"), deps = _substrate_deps(trimmed_deps, \"numpy\"),", "the NumPy substrate. testonly: As with `py_library`. srcs_version: As with", "symlinks to generate. 
transitive_sources = depset(transitive = transitive_sources) runfiles_dict =", "resolved_omit_deps_jax = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps + jax_omit_deps", "None): \"\"\"A TFP `py2and3_test` for each of TF, NumPy, and", ") if \"jax\" not in disabled_substrates: jax_srcs = _substrate_srcs(srcs, \"jax\")", "= \"rewrite_{}_jax\".format(src.replace(\".\", \"_\")), srcs = [src], outs = [_substrate_src(src, \"jax\")],", "attrs = { \"substrate\": attr.string(), \"deps\": attr.label_list(), }, ) def", "A size override for the JAX target. numpy_size: A size", ") runfiles = DefaultInfo(runfiles = ctx.runfiles( transitive_files = py_runfiles, root_symlinks", ") native.py_library( name = \"{}.jax.raw\".format(name), srcs = jax_srcs, deps =", "deps while adding the given symlinks to the runfiles structure.", "add runfiles symlinks. \"\"\" # Aggregate the depset inputs to", "OR CONDITIONS OF ANY KIND, either express or implied. #", "outs = [_substrate_src(src, \"numpy\")], cmd = \"$(location {}) $(SRCS) --omit_deps={}", "name. NumPy and JAX libraries have '.numpy' and '.jax' appended.", "the License is distributed on an \"AS IS\" BASIS, #", "[ \"internal:all_util\", \"internal:docstring_util\", \"internal:reparameterization\", \"layers\", \"platform_google\", ] REWRITER_TARGET = \"//tensorflow_probability/substrates/meta:rewrite\"", "structures to pass along Python srcs/deps/etc. py_info = PyInfo( transitive_sources", "\"_\")), srcs = [src], outs = [_substrate_src(src, \"numpy\")], cmd =", "transitive dependencies. transitive_sources = [] uses_shared_libraries = [] imports =", "\"\"\"A TFP `py2and3_test` for each of TF, NumPy, and JAX.", "and '.jax' appended. srcs: As with `py_library`. 
A `genrule` is", "dep in deps if (dep not in substrates_omit_deps and dep", "$(SRCS) --numpy_to_jax > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) jax_deps =", "the given substrate.\"\"\" return [_substrate_src(src, substrate) for src in srcs]", "denotes py3-only vs py2+3 tests. Args: *args: Passed to underlying", "becomes irreversible. def py3_test(*args, **kwargs): \"\"\"Internal/external reversibility, denotes py3-only vs", "to disable, items from [\"numpy\", \"jax\"]. srcs_version: As with `py_test`.", "in srcs: native.genrule( name = \"rewrite_{}_numpy\".format(src.replace(\".\", \"_\")), srcs = [src],", "to one appropriate for the given substrate.\"\"\" dep_to_check = dep", "law or agreed to in writing, software # distributed under", "= \"_{}\".format(name) tags = [t for t in tags] tags.append(name_tag)", "srcs = jax_srcs, main = _substrate_src(\"{}.py\".format(name), \"jax\"), deps = jax_deps,", "[], jax_tags = [], disabled_substrates = [], srcs_version = \"PY2AND3\",", "# Copyright 2019 The TensorFlow Probability Authors. # # Licensed", "# py2and3_test and py_test comingling in BUILD files. Otherwise the", "def multi_substrate_py_test( name, size = \"small\", jax_size = None, numpy_size", "given substrate.\"\"\" dep_to_check = dep if dep.startswith(\":\"): dep_to_check = \"{}{}\".format(native.package_name(),", "JAX target. numpy_size: A size override for the numpy target.", "size = size, srcs = srcs, main = \"{}.py\".format(name), deps", "Add symlinks under tfp/substrates/numpy. 
substrate_runfiles_symlinks( name = \"{}.numpy\".format(name), substrate =", "Add JAX build dep py3_test( name = \"{}.jax\".format(name), size =", "= \"{}/substrates/{}/{}\".format( pre, substrate, post.replace(file_substr, \"\"), ) runfiles_dict[RUNFILES_ROOT + out_path]", "cmd = \"$(location {}) $(SRCS) > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET],", "may obtain a copy of the License at # #", "of the `test_suite` which covers TF, NumPy and JAX variants", "substrate): \"\"\"Convert deps to those appropriate for the given substrate.\"\"\"", "= numpy_size or size, srcs = numpy_srcs, main = _substrate_src(\"{}.py\".format(name),", "= _substrate_deps(trimmed_deps, \"jax\"), srcs_version = srcs_version, testonly = testonly, )", "transitional period during which we have the internal # py2and3_test", "in ctx.attr.deps], ) runfiles = DefaultInfo(runfiles = ctx.runfiles( transitive_files =", "may not use this file except in compliance with the", "= [], deps = [], tags = [], numpy_tags =", "return \"{}.{}\".format(dep, substrate) return \"{}:{}.{}\".format(dep, dep.split(\"/\")[-1], substrate) return dep def", "this file except in compliance with the License. # You", "process becomes irreversible. def py3_test(*args, **kwargs): \"\"\"Internal/external reversibility, denotes py3-only", "= cc_infos) py_runfiles = depset( transitive = [depset(transitive = [", "= depset( transitive = [depset(transitive = [ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ])", "libraries are not rewritten for the substrates. jax_omit_deps: List of", "libraries for substrate variants. tags: Tags global to this test", "srcs: As with `py_test`. These will have a `genrule` emitted", "See documentation at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks = rule( implementation =", "writing the test file into a subdirectory. 
deps: As with", "multi_substrate_py_library( name, srcs = [], deps = [], substrates_omit_deps =", "# # Licensed under the Apache License, Version 2.0 (the", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "out_path = \"{}/substrates/{}/{}\".format( pre, substrate, post.replace(file_substr, \"\"), ) runfiles_dict[RUNFILES_ROOT +", "substrate: One of 'jax' or 'numpy'; which substrate this applies", "Determine the set of symlinks to generate. transitive_sources = depset(transitive", "\"PY2AND3\"): \"\"\"A TFP `py_library` for each of TF, NumPy, and", "DefaultInfo(runfiles = ctx.runfiles( transitive_files = py_runfiles, root_symlinks = runfiles_dict, ))", "if dep.startswith(\":\"): dep = \"{}{}\".format(native.package_name(), dep) return dep def _substrate_runfiles_symlinks_impl(ctx):", "is used to produce the `test_suite`. numpy_tags: Tags specific to", "`py_test`. The list is rewritten to depend on substrate-specific libraries", "return py_info, py_cc_link_info, runfiles # See documentation at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html", "[REWRITER_TARGET], ) native.py_library( name = \"{}.numpy.raw\".format(name), srcs = _substrate_srcs(srcs, \"numpy\"),", "not in disabled_substrates: jax_srcs = _substrate_srcs(srcs, \"jax\") native.genrule( name =", "or '.jax' as appropriate. size: As with `py_test`. jax_size: A", "in dep or dep.startswith(\":\"): if \"internal/backend\" in dep: return dep", "= \"rewrite_{}_numpy\".format(src.replace(\".\", \"_\")), srcs = [src], outs = [_substrate_src(src, \"numpy\")],", "= \"PY2AND3\"): \"\"\"A TFP `py_library` for each of TF, NumPy,", "runfiles # See documentation at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks = rule(", "src in srcs: native.genrule( name = \"rewrite_{}_jax\".format(src.replace(\".\", \"_\")), srcs =", "\"small\", jax_size = None, numpy_size = None, srcs = [],", "'.numpy', or '.jax' as appropriate. size: As with `py_test`. 
jax_size:", "dep in deps] backend_dep = \"//tensorflow_probability/python/internal/backend/{}\".format(substrate) if backend_dep not in", "export # rewrite process becomes irreversible. def py3_test(*args, **kwargs): \"\"\"Internal/external", "# This is needed for the transitional period during which", "in deps if (dep not in substrates_omit_deps and dep not", "Tags global to this test target. NumPy also gets a", "= srcs_version, timeout = timeout, shard_count = shard_count, ) if", "those libraries are not rewritten for the substrates. jax_omit_deps: List", "[REWRITER_TARGET], ) native.py_library( name = \"{}.jax.raw\".format(name), srcs = jax_srcs, deps", "src in srcs] def _substrate_dep(dep, substrate): \"\"\"Convert a single dep", ") def multi_substrate_py_test( name, size = \"small\", jax_size = None,", "or implied. # See the License for the specific language", "depset(transitive = transitive_sources) runfiles_dict = {} substrate = ctx.attr.substrate file_substr", "\"\"\"A TFP `py_library` for each of TF, NumPy, and JAX.", "substrate.\"\"\" new_deps = [_substrate_dep(dep, substrate) for dep in deps] backend_dep", "testonly = testonly, ) # Add symlinks under tfp/substrates/jax. substrate_runfiles_symlinks(", "= timeout, shard_count = shard_count, ) if \"numpy\" not in", "given substrate.\"\"\" return [_substrate_src(src, substrate) for src in srcs] def", "numpy_srcs = _substrate_srcs(srcs, \"numpy\") native.genrule( name = \"rewrite_{}_numpy\".format(name), srcs =", "\"deps\": attr.label_list(), }, ) def multi_substrate_py_library( name, srcs = [],", "the NumPy test. (e.g. `\"notap\"`). jax_tags: Tags specific to the", "srcs: As with `py_library`. A `genrule` is used to rewrite", "to omit for the JAX substrate. numpy_omit_deps: List of deps", "each of TF, NumPy, and JAX. 
Args: name: Name of", "[REWRITER_TARGET], ) jax_deps = _substrate_deps(deps, \"jax\") # [internal] Add JAX", "shard_count = shard_count, ) if \"numpy\" not in disabled_substrates: numpy_srcs", "f in transitive_sources.to_list(): if \"tensorflow_probability\" in f.dirname and file_substr in", "cc_infos = [] for dep in ctx.attr.deps: if PyInfo in", "along deps while adding the given symlinks to the runfiles", "has_py3_only_sources = [] cc_infos = [] for dep in ctx.attr.deps:", "new_deps: new_deps.append(backend_dep) return new_deps # This is needed for the", "--numpy_to_jax > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) jax_deps = _substrate_deps(deps,", "\"//third_party/py/tensorflow:tensorflow\", ] trimmed_deps = [dep for dep in deps if", "= DefaultInfo(runfiles = ctx.runfiles( transitive_files = py_runfiles, root_symlinks = runfiles_dict,", "for dep in ctx.attr.deps], ) runfiles = DefaultInfo(runfiles = ctx.runfiles(", "outs = [_substrate_src(src, \"jax\")], cmd = \"$(location {}) $(SRCS) --omit_deps={}", "= ctx.runfiles( transitive_files = py_runfiles, root_symlinks = runfiles_dict, )) return", "[depset(transitive = [ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ]) for dep in ctx.attr.deps],", "(dep not in substrates_omit_deps and dep not in remove_deps)] resolved_omit_deps_numpy", "of deps to omit for the NumPy substrate. testonly: As", "= \"numpy\", deps = [\":{}.numpy.raw\".format(name)], testonly = testonly, ) resolved_omit_deps_jax", "cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport if CcInfo in dep: cc_infos.append(dep[CcInfo]) # Determine", ")) return py_info, py_cc_link_info, runfiles # See documentation at: #", "numpy_srcs, main = _substrate_src(\"{}.py\".format(name), \"numpy\"), deps = _substrate_deps(deps, \"numpy\"), tags", "each of TF, NumPy, and JAX. 
Args: name: The TF", "= cc_common.merge_cc_infos(cc_infos = cc_infos) py_runfiles = depset( transitive = [depset(transitive", "tags.append(name_tag) tags.append(\"multi_substrate\") native.py_test( name = \"{}.tf\".format(name), size = size, srcs", "set of symlinks to generate. transitive_sources = depset(transitive = transitive_sources)", "native.py_library( name = name, srcs = srcs, deps = deps,", "srcs = [src], outs = [_substrate_src(src, \"jax\")], cmd = \"$(location", "dep in ctx.attr.deps], ) runfiles = DefaultInfo(runfiles = ctx.runfiles( transitive_files", "rewrite srcs for NumPy and JAX substrates. deps: As with", "in disabled_substrates: jax_srcs = _substrate_srcs(srcs, \"jax\") native.genrule( name = \"rewrite_{}_jax\".format(name),", "name = \"{}.tf\".format(name), size = size, srcs = srcs, main", "_substrate_deps(deps, \"jax\") # [internal] Add JAX build dep py3_test( name", "+ out_path] = f # Construct the output structures to", "not in remove_deps)] resolved_omit_deps_numpy = [ _resolve_omit_dep(dep) for dep in", "_substrate_srcs(srcs, \"jax\") for src in srcs: native.genrule( name = \"rewrite_{}_jax\".format(src.replace(\".\",", "srcs_version and python_version are added (with value `\"PY3\"`) if not", "= _substrate_runfiles_symlinks_impl, attrs = { \"substrate\": attr.string(), \"deps\": attr.label_list(), },", "\"{}.jax\".format(name), substrate = \"jax\", deps = [\":{}.jax.raw\".format(name)], testonly = testonly,", "imports = [] has_py2_only_sources = [] has_py3_only_sources = [] cc_infos", "dep not in remove_deps)] resolved_omit_deps_numpy = [ _resolve_omit_dep(dep) for dep", "py3_test(*args, **kwargs): \"\"\"Internal/external reversibility, denotes py3-only vs py2+3 tests. Args:", "py_library labels. These are passed along. Args: ctx: Rule analysis", "of TF, NumPy, and JAX. Args: name: The TF `py_library`", "items from [\"numpy\", \"jax\"]. srcs_version: As with `py_test`. timeout: As", "'numpy'; which substrate this applies to. 
- deps: A list", "cc_infos) py_runfiles = depset( transitive = [depset(transitive = [ dep[DefaultInfo].data_runfiles.files,", "substrate = \"jax\", deps = [\":{}.jax.raw\".format(name)], testonly = testonly, )", "$@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) jax_deps = _substrate_deps(deps, \"jax\") #", "numpy_tags = [], jax_tags = [], disabled_substrates = [], srcs_version", "deps: As with `py_test`. The list is rewritten to depend", "dep in ctx.attr.deps: if PyInfo in dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports)", "with `py_library`. The list is rewritten to depend on substrate-specific", "in disabled_substrates: numpy_srcs = _substrate_srcs(srcs, \"numpy\") native.genrule( name = \"rewrite_{}_numpy\".format(name),", "passed along. Args: ctx: Rule analysis context. Returns: Info objects", "deps to those appropriate for the given substrate.\"\"\" new_deps =", "def multi_substrate_py_library( name, srcs = [], deps = [], substrates_omit_deps", "along. Args: ctx: Rule analysis context. Returns: Info objects to", "to produce the `test_suite`. numpy_tags: Tags specific to the NumPy", "= [] has_py3_only_sources = [] cc_infos = [] for dep", "numpy_omit_deps: List of deps to omit for the NumPy substrate.", "dep py3_test( name = \"{}.jax\".format(name), size = jax_size or size,", "tags] tags.append(name_tag) tags.append(\"multi_substrate\") native.py_test( name = \"{}.tf\".format(name), size = size,", "\":\" in dep: return \"{}.{}\".format(dep, substrate) return \"{}:{}.{}\".format(dep, dep.split(\"/\")[-1], substrate)", "= _substrate_deps(deps, \"numpy\"), tags = tags + [\"tfp_numpy\"] + numpy_tags,", "in writing, software # distributed under the License is distributed", "\"\"\"Resolves a `substrates_omit_deps` item to full target.\"\"\" if \":\" not", "custom BUILD rule to generate python runfiles symlinks. 
A custom", "srcs = [], deps = [], substrates_omit_deps = [], jax_omit_deps", "\"$(location {}) $(SRCS) --numpy_to_jax > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], )", "substrate) for src in srcs] def _substrate_dep(dep, substrate): \"\"\"Convert a", "pre, substrate, post.replace(file_substr, \"\"), ) runfiles_dict[RUNFILES_ROOT + out_path] = f", "[\":{}.numpy.raw\".format(name)], testonly = testonly, ) resolved_omit_deps_jax = [ _resolve_omit_dep(dep) for", "disabled_substrates = [], srcs_version = \"PY2AND3\", timeout = None, shard_count", ") py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos) py_runfiles = depset( transitive", "= [_substrate_src(src, \"jax\")], cmd = \"$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "target. srcs: As with `py_test`. These will have a `genrule`", "License, Version 2.0 (the \"License\"); # you may not use", "for NumPy and JAX substrates. deps: As with `py_library`. The", "substrate): \"\"\"Rewrite a single src filename for the given substrate.\"\"\"", "NumPy substrate. testonly: As with `py_library`. srcs_version: As with `py_library`.", "\"numpy\"), deps = _substrate_deps(deps, \"numpy\"), tags = tags + [\"tfp_numpy\"]", "for the JAX target. numpy_size: A size override for the", "specific to the JAX test. (e.g. `\"notap\"`). disabled_substrates: Iterable of", "+ jax_tags, srcs_version = srcs_version, python_version = \"PY3\", timeout =", "\"rewrite_{}_numpy\".format(src.replace(\".\", \"_\")), srcs = [src], outs = [_substrate_src(src, \"numpy\")], cmd", "the License for the specific language governing permissions and #", "`py_test`. \"\"\" name_tag = \"_{}\".format(name) tags = [t for t", "srcs_version, testonly = testonly, ) # Add symlinks under tfp/substrates/jax.", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "py_test. **kwargs: Passed to underlying py_test. 
srcs_version and python_version are", "files matching a substrate genrule file pattern, i.e. `'_jax/_generated_normal.py'`. This", "**kwargs) def _resolve_omit_dep(dep): \"\"\"Resolves a `substrates_omit_deps` item to full target.\"\"\"", "# See documentation at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks = rule( implementation", "suffixed with '.tf', '.numpy', or '.jax' as appropriate. size: As", "variants, writing the test file into a subdirectory. deps: As", "tags = tags + [\"tfp_numpy\"] + numpy_tags, srcs_version = srcs_version,", "`'_jax/_generated_normal.py'`. This rule will aggregate and pass along deps while", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "= \"PY3\", timeout = timeout, shard_count = shard_count, ) if", "# Add symlinks under tfp/substrates/numpy. substrate_runfiles_symlinks( name = \"{}.numpy\".format(name), substrate", "`py2and3_test` suffixed with '.tf', '.numpy', or '.jax' as appropriate. size:", "dep if \":\" in dep: return \"{}.{}\".format(dep, substrate) return \"{}:{}.{}\".format(dep,", "\"tensorflow_probability\" in f.dirname and file_substr in f.short_path: pre, post =", "\"_\")), srcs = [src], outs = [_substrate_src(src, \"jax\")], cmd =", "is used to rewrite srcs for NumPy and JAX substrates.", "dep if dep.startswith(\":\"): dep_to_check = \"{}{}\".format(native.package_name(), dep) for no_rewrite in", "\"jax\" not in disabled_substrates: jax_srcs = _substrate_srcs(srcs, \"jax\") native.genrule( name", "testonly, ) # Add symlinks under tfp/substrates/jax. substrate_runfiles_symlinks( name =", "of substrates to disable, items from [\"numpy\", \"jax\"]. srcs_version: As", "] jax_srcs = _substrate_srcs(srcs, \"jax\") for src in srcs: native.genrule(", "test file into a subdirectory. deps: As with `py_test`. 
The", "srcs = numpy_srcs, main = _substrate_src(\"{}.py\".format(name), \"numpy\"), deps = _substrate_deps(deps,", "in substrates_omit_deps + jax_omit_deps ] jax_srcs = _substrate_srcs(srcs, \"jax\") for", "Iterable of substrates to disable, items from [\"numpy\", \"jax\"]. srcs_version:", "target. numpy_size: A size override for the numpy target. srcs:", "# distributed under the License is distributed on an \"AS", "in dep: return \"{}.{}\".format(dep, substrate) return \"{}:{}.{}\".format(dep, dep.split(\"/\")[-1], substrate) return", "# Unless required by applicable law or agreed to in", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "srcs = srcs, outs = jax_srcs, cmd = \"$(location {})", "not in new_deps: new_deps.append(backend_dep) return new_deps # This is needed", "# ============================================================================ \"\"\"Build defs for TF/NumPy/JAX-variadic libraries & tests.\"\"\" #", "if not specified. \"\"\" kwargs = dict(kwargs) if \"srcs_version\" not", "specific to the NumPy test. (e.g. `\"notap\"`). jax_tags: Tags specific", "transitive = [depset(transitive = [ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ]) for dep", "_substrate_runfiles_symlinks_impl, attrs = { \"substrate\": attr.string(), \"deps\": attr.label_list(), }, )", "\"rewrite_{}_numpy\".format(name), srcs = srcs, outs = numpy_srcs, cmd = \"$(location", "native.genrule( name = \"rewrite_{}_numpy\".format(name), srcs = srcs, outs = numpy_srcs,", "emitted to rewrite NumPy and JAX variants, writing the test", "override for the numpy target. srcs: As with `py_test`. These", "--omit_deps={} > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_numpy), ), tools = [REWRITER_TARGET], )", "the Apache License, Version 2.0 (the \"License\"); # you may", "name: Name of the `test_suite` which covers TF, NumPy and", "file into a subdirectory. deps: As with `py_test`. 
The list", "native.genrule( name = \"rewrite_{}_jax\".format(name), srcs = srcs, outs = jax_srcs,", "kwargs[\"srcs_version\"] = \"PY3\" if \"python_version\" not in kwargs: kwargs[\"python_version\"] =", "$@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) py3_test( name = \"{}.numpy\".format(name), size", "ctx.attr.deps], ) runfiles = DefaultInfo(runfiles = ctx.runfiles( transitive_files = py_runfiles,", "a single src filename for the given substrate.\"\"\" return \"_{}/_generated_{}\".format(substrate,", "\"internal/backend\" in dep: return dep if \":\" in dep: return", "= srcs, outs = numpy_srcs, cmd = \"$(location {}) $(SRCS)", "= [\":{}.jax.raw\".format(name)], testonly = testonly, ) def multi_substrate_py_test( name, size", "uses_shared_libraries = [] imports = [] has_py2_only_sources = [] has_py3_only_sources", "py_runfiles = depset( transitive = [depset(transitive = [ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files,", "= \"{}.py\".format(name), deps = deps, tags = tags, srcs_version =", "and JAX. Args: name: Name of the `test_suite` which covers", ") runfiles_dict[RUNFILES_ROOT + out_path] = f # Construct the output", "the transitional period during which we have the internal #", "given substrate.\"\"\" return \"_{}/_generated_{}\".format(substrate, src) def _substrate_srcs(srcs, substrate): \"\"\"Rewrite src", "in BUILD files. 
Otherwise the OSS export # rewrite process", "\"jax\", deps = [\":{}.jax.raw\".format(name)], testonly = testonly, ) def multi_substrate_py_test(", "RUNFILES_ROOT = \"tensorflow_probability/\" def _substrate_src(src, substrate): \"\"\"Rewrite a single src", "\"jax\")], cmd = \"$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax > $@\".format(", "to rewrite NumPy and JAX variants, writing the test file", "file_substr = \"_{}/_generated_\".format(substrate) for f in transitive_sources.to_list(): if \"tensorflow_probability\" in", "has_py3_only_sources = any(has_py3_only_sources), ) py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos) py_runfiles", "generate. transitive_sources = depset(transitive = transitive_sources) runfiles_dict = {} substrate", "py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos) py_runfiles = depset( transitive =", "transitive_sources = depset(transitive = transitive_sources) runfiles_dict = {} substrate =", "- substrate: One of 'jax' or 'numpy'; which substrate this", "into a subdirectory. deps: As with `py_test`. The list is", "variants. substrates_omit_deps: List of deps to omit if those libraries", "for no_rewrite in NO_REWRITE_NEEDED: if no_rewrite in dep_to_check: return dep", "and JAX variants of the test. Each substrate will have", "= \"$(location {}) $(SRCS) > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], )", "under the License is distributed on an \"AS IS\" BASIS,", "(with value `\"PY3\"`) if not specified. 
\"\"\" kwargs = dict(kwargs)", "jax_srcs, deps = _substrate_deps(trimmed_deps, \"jax\"), srcs_version = srcs_version, testonly =", "list is rewritten to depend on substrate-specific libraries for substrate", "have the internal # py2and3_test and py_test comingling in BUILD", "jax_size = None, numpy_size = None, srcs = [], deps", "name = \"{}.jax\".format(name), substrate = \"jax\", deps = [\":{}.jax.raw\".format(name)], testonly", "the given substrate.\"\"\" dep_to_check = dep if dep.startswith(\":\"): dep_to_check =", "[\"tfp_numpy\"] + numpy_tags, srcs_version = srcs_version, python_version = \"PY3\", timeout", "= _substrate_srcs(srcs, \"jax\") native.genrule( name = \"rewrite_{}_jax\".format(name), srcs = srcs,", "subdirectory. deps: As with `py_test`. The list is rewritten to", "# limitations under the License. # ============================================================================ \"\"\"Build defs for", "kwargs: kwargs[\"srcs_version\"] = \"PY3\" if \"python_version\" not in kwargs: kwargs[\"python_version\"]", "> $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_jax), ), tools = [REWRITER_TARGET], ) native.py_library(", "\"internal:all_util\", \"internal:docstring_util\", \"internal:reparameterization\", \"layers\", \"platform_google\", ] REWRITER_TARGET = \"//tensorflow_probability/substrates/meta:rewrite\" RUNFILES_ROOT", "pass along Python srcs/deps/etc. py_info = PyInfo( transitive_sources = transitive_sources,", "\"\"\" # Aggregate the depset inputs to resolve transitive dependencies.", "resolve transitive dependencies. transitive_sources = [] uses_shared_libraries = [] imports", "depend on substrate-specific libraries for substrate variants. tags: Tags global", "substrate. testonly: As with `py_library`. srcs_version: As with `py_library`. \"\"\"", "size = numpy_size or size, srcs = numpy_srcs, main =", "= deps, tags = tags, srcs_version = srcs_version, timeout =", "of py_library labels. These are passed along. 
Args: ctx: Rule", "_substrate_deps(trimmed_deps, \"jax\"), srcs_version = srcs_version, testonly = testonly, ) #", "jax_omit_deps ] jax_srcs = _substrate_srcs(srcs, \"jax\") for src in srcs:", "in kwargs: kwargs[\"python_version\"] = \"PY3\" native.py_test(*args, **kwargs) def _resolve_omit_dep(dep): \"\"\"Resolves", "runfiles_dict = {} substrate = ctx.attr.substrate file_substr = \"_{}/_generated_\".format(substrate) for", "= _substrate_srcs(srcs, \"jax\") for src in srcs: native.genrule( name =", "in dep: # DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport if CcInfo", "testonly = testonly, ) # Add symlinks under tfp/substrates/numpy. substrate_runfiles_symlinks(", "[] uses_shared_libraries = [] imports = [] has_py2_only_sources = []", "size, srcs = numpy_srcs, main = _substrate_src(\"{}.py\".format(name), \"numpy\"), deps =", "& tests.\"\"\" # [internal] load python3.bzl NO_REWRITE_NEEDED = [ \"internal:all_util\",", "ANY KIND, either express or implied. # See the License", "substrate) return \"{}:{}.{}\".format(dep, dep.split(\"/\")[-1], substrate) return dep def _substrate_deps(deps, substrate):", "the License. # You may obtain a copy of the", "**kwargs): \"\"\"Internal/external reversibility, denotes py3-only vs py2+3 tests. 
Args: *args:", "deps = _substrate_deps(trimmed_deps, \"jax\"), srcs_version = srcs_version, testonly = testonly,", "will have a dedicated `py2and3_test` suffixed with '.tf', '.numpy', or", "\"numpy\") native.genrule( name = \"rewrite_{}_numpy\".format(name), srcs = srcs, outs =", "t in tags] tags.append(name_tag) tags.append(\"multi_substrate\") native.py_test( name = \"{}.tf\".format(name), size", "# See the License for the specific language governing permissions", "\"jax\") # [internal] Add JAX build dep py3_test( name =", "py3_test( name = \"{}.numpy\".format(name), size = numpy_size or size, srcs", "\"{}.jax\".format(name), size = jax_size or size, srcs = jax_srcs, main", "testonly, ) def multi_substrate_py_test( name, size = \"small\", jax_size =", "substrates_omit_deps = [], jax_omit_deps = [], numpy_omit_deps = [], testonly", "size = jax_size or size, srcs = jax_srcs, main =", "main = _substrate_src(\"{}.py\".format(name), \"jax\"), deps = jax_deps, tags = tags", "cmd = \"$(location {}) $(SRCS) --omit_deps={} > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_numpy),", "uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if PyCcLinkParamsProvider in dep: #", "[], numpy_omit_deps = [], testonly = 0, srcs_version = \"PY2AND3\"):", "srcs: native.genrule( name = \"rewrite_{}_numpy\".format(src.replace(\".\", \"_\")), srcs = [src], outs", "DisableOnExport if CcInfo in dep: cc_infos.append(dep[CcInfo]) # Determine the set", "cc_infos.append(dep[CcInfo]) # Determine the set of symlinks to generate. transitive_sources", "appended. srcs: As with `py_library`. A `genrule` is used to", "_substrate_src(\"{}.py\".format(name), \"numpy\"), deps = _substrate_deps(deps, \"numpy\"), tags = tags +", "py2+3 tests. Args: *args: Passed to underlying py_test. 
**kwargs: Passed", "the runfiles structure. Build rule attributes: - substrate: One of", "symlinks under tfp/substrates/numpy. substrate_runfiles_symlinks( name = \"{}.numpy\".format(name), substrate = \"numpy\",", "\"jax\"), deps = jax_deps, tags = tags + [\"tfp_jax\"] +", "), tools = [REWRITER_TARGET], ) native.py_library( name = \"{}.jax.raw\".format(name), srcs", "in dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if PyCcLinkParamsProvider", ") # Add symlinks under tfp/substrates/jax. substrate_runfiles_symlinks( name = \"{}.jax\".format(name),", "\"$(location {}) $(SRCS) --omit_deps={} > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_numpy), ), tools", "_substrate_dep(dep, substrate): \"\"\"Convert a single dep to one appropriate for", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "Build rule attributes: - substrate: One of 'jax' or 'numpy';", "writing, software # distributed under the License is distributed on", "= \"{}.jax.raw\".format(name), srcs = jax_srcs, deps = _substrate_deps(trimmed_deps, \"jax\"), srcs_version", "\"{}.tf\".format(name), size = size, srcs = srcs, main = \"{}.py\".format(name),", "build rule which adds runfiles symlinks for files matching a", "srcs_version, testonly = testonly, ) remove_deps = [ \"//third_party/py/tensorflow\", \"//third_party/py/tensorflow:tensorflow\",", "JAX substrates. deps: As with `py_library`. The list is rewritten", "= srcs, deps = deps, srcs_version = srcs_version, testonly =", "= [ _resolve_omit_dep(dep) for dep in substrates_omit_deps + numpy_omit_deps ]", "substrate_runfiles_symlinks( name = \"{}.numpy\".format(name), substrate = \"numpy\", deps = [\":{}.numpy.raw\".format(name)],", "with `py_library`. 
\"\"\" native.py_library( name = name, srcs = srcs,", "REWRITER_TARGET, \",\".join(resolved_omit_deps_numpy), ), tools = [REWRITER_TARGET], ) native.py_library( name =", "= srcs_version, testonly = testonly, ) remove_deps = [ \"//third_party/py/tensorflow\",", "PyInfo( transitive_sources = transitive_sources, uses_shared_libraries = any(uses_shared_libraries), imports = depset(transitive", "substrates_omit_deps: List of deps to omit if those libraries are", "[_substrate_src(src, substrate) for src in srcs] def _substrate_dep(dep, substrate): \"\"\"Convert", "_substrate_deps(deps, \"numpy\"), tags = tags + [\"tfp_numpy\"] + numpy_tags, srcs_version", "\"numpy\"), srcs_version = srcs_version, testonly = testonly, ) # Add", "a dedicated `py2and3_test` suffixed with '.tf', '.numpy', or '.jax' as", "return \"_{}/_generated_{}\".format(substrate, src) def _substrate_srcs(srcs, substrate): \"\"\"Rewrite src filenames for", "which covers TF, NumPy and JAX variants of the test.", "global to this test target. NumPy also gets a `'tfp_numpy'`", "for substrate variants. substrates_omit_deps: List of deps to omit if", "'.jax' appended. srcs: As with `py_library`. A `genrule` is used", "and pass along deps while adding the given symlinks to", "\"\"\" native.py_library( name = name, srcs = srcs, deps =", "of deps to omit for the JAX substrate. numpy_omit_deps: List", "As with `py_library`. A `genrule` is used to rewrite srcs", "srcs/deps/etc. py_info = PyInfo( transitive_sources = transitive_sources, uses_shared_libraries = any(uses_shared_libraries),", "JAX gets a `'tfp_jax'` tag. A `f'_{name}'` tag is used", "for the transitional period during which we have the internal", "> $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) jax_deps = _substrate_deps(deps, \"jax\")", "needed for the transitional period during which we have the", "[] imports = [] has_py2_only_sources = [] has_py3_only_sources = []", "Tags specific to the JAX test. (e.g. `\"notap\"`). 
disabled_substrates: Iterable", "_substrate_deps(trimmed_deps, \"numpy\"), srcs_version = srcs_version, testonly = testonly, ) #", "= [], substrates_omit_deps = [], jax_omit_deps = [], numpy_omit_deps =", "deps and add runfiles symlinks. \"\"\" # Aggregate the depset", "_substrate_deps(deps, substrate): \"\"\"Convert deps to those appropriate for the given", "= [], jax_tags = [], disabled_substrates = [], srcs_version =", "2019 The TensorFlow Probability Authors. # # Licensed under the", "under the License. # ============================================================================ \"\"\"Build defs for TF/NumPy/JAX-variadic libraries", "tags = [t for t in tags] tags.append(name_tag) tags.append(\"multi_substrate\") native.py_test(", "# [internal] load python3.bzl NO_REWRITE_NEEDED = [ \"internal:all_util\", \"internal:docstring_util\", \"internal:reparameterization\",", "Python srcs/deps/etc. py_info = PyInfo( transitive_sources = transitive_sources, uses_shared_libraries =", "NumPy, and JAX. Args: name: The TF `py_library` name. NumPy", "= [], testonly = 0, srcs_version = \"PY2AND3\"): \"\"\"A TFP", "+ numpy_omit_deps ] for src in srcs: native.genrule( name =", "= \"$(location {}) $(SRCS) --numpy_to_jax > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET],", "reversibility, denotes py3-only vs py2+3 tests. 
Args: *args: Passed to", "substrate.\"\"\" return [_substrate_src(src, substrate) for src in srcs] def _substrate_dep(dep,", "= [], srcs_version = \"PY2AND3\", timeout = None, shard_count =", "= None, numpy_size = None, srcs = [], deps =", "in dep: dep = \"{}:{}\".format(dep, dep.split(\"/\")[-1]) if dep.startswith(\":\"): dep =", "= py_runfiles, root_symlinks = runfiles_dict, )) return py_info, py_cc_link_info, runfiles", "deps] backend_dep = \"//tensorflow_probability/python/internal/backend/{}\".format(substrate) if backend_dep not in new_deps: new_deps.append(backend_dep)", "$(SRCS) --omit_deps={} > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_numpy), ), tools = [REWRITER_TARGET],", "[] cc_infos = [] for dep in ctx.attr.deps: if PyInfo", "NumPy and JAX variants of the test. Each substrate will", "As with `py_library`. The list is rewritten to depend on", "have a dedicated `py2and3_test` suffixed with '.tf', '.numpy', or '.jax'", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "cc_common.merge_cc_infos(cc_infos = cc_infos) py_runfiles = depset( transitive = [depset(transitive =", "any(has_py2_only_sources), has_py3_only_sources = any(has_py3_only_sources), ) py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos)", "[] has_py3_only_sources = [] cc_infos = [] for dep in", "native.py_test(*args, **kwargs) def _resolve_omit_dep(dep): \"\"\"Resolves a `substrates_omit_deps` item to full", "tags = [], numpy_tags = [], jax_tags = [], disabled_substrates", "in NO_REWRITE_NEEDED: if no_rewrite in dep_to_check: return dep if \"tensorflow_probability/\"", "structure. Build rule attributes: - substrate: One of 'jax' or", "rewritten to depend on substrate-specific libraries for substrate variants. tags:", "if backend_dep not in new_deps: new_deps.append(backend_dep) return new_deps # This", "libraries for substrate variants. 
substrates_omit_deps: List of deps to omit", "srcs = jax_srcs, deps = _substrate_deps(trimmed_deps, \"jax\"), srcs_version = srcs_version,", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "py_runfiles, root_symlinks = runfiles_dict, )) return py_info, py_cc_link_info, runfiles #", "[], substrates_omit_deps = [], jax_omit_deps = [], numpy_omit_deps = [],", "# cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport if CcInfo in dep: cc_infos.append(dep[CcInfo]) #", "List of deps to omit if those libraries are not", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "runfiles_dict, )) return py_info, py_cc_link_info, runfiles # See documentation at:", "the output structures to pass along Python srcs/deps/etc. py_info =", "appropriate. size: As with `py_test`. jax_size: A size override for", "not in substrates_omit_deps and dep not in remove_deps)] resolved_omit_deps_numpy =", "not in kwargs: kwargs[\"srcs_version\"] = \"PY3\" if \"python_version\" not in", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "`py2and3_test` for each of TF, NumPy, and JAX. Args: name:", ") if \"numpy\" not in disabled_substrates: numpy_srcs = _substrate_srcs(srcs, \"numpy\")", "if dep.startswith(\":\"): dep_to_check = \"{}{}\".format(native.package_name(), dep) for no_rewrite in NO_REWRITE_NEEDED:", "deps: A list of py_library labels. 
These are passed along.", "= [] imports = [] has_py2_only_sources = [] has_py3_only_sources =", "\",\".join(resolved_omit_deps_numpy), ), tools = [REWRITER_TARGET], ) native.py_library( name = \"{}.numpy.raw\".format(name),", "\"substrate\": attr.string(), \"deps\": attr.label_list(), }, ) def multi_substrate_py_library( name, srcs", "specific language governing permissions and # limitations under the License.", "in transitive_sources.to_list(): if \"tensorflow_probability\" in f.dirname and file_substr in f.short_path:", "\"rewrite_{}_jax\".format(src.replace(\".\", \"_\")), srcs = [src], outs = [_substrate_src(src, \"jax\")], cmd", "substrate genrule file pattern, i.e. `'_jax/_generated_normal.py'`. This rule will aggregate", "deps = [], substrates_omit_deps = [], jax_omit_deps = [], numpy_omit_deps", "disable, items from [\"numpy\", \"jax\"]. srcs_version: As with `py_test`. timeout:", "TF, NumPy and JAX variants of the test. Each substrate", "= \"{}.numpy\".format(name), size = numpy_size or size, srcs = numpy_srcs,", "JAX variants, writing the test file into a subdirectory. deps:", "# Determine the set of symlinks to generate. transitive_sources =", "= shard_count, ) if \"numpy\" not in disabled_substrates: numpy_srcs =", "and JAX. Args: name: The TF `py_library` name. NumPy and", "post = f.short_path.split(\"/python/\") out_path = \"{}/substrates/{}/{}\".format( pre, substrate, post.replace(file_substr, \"\"),", "size, srcs = srcs, main = \"{}.py\".format(name), deps = deps,", "# you may not use this file except in compliance", "{}) $(SRCS) --numpy_to_jax > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) jax_deps", "generate python runfiles symlinks. A custom build rule which adds", "src) def _substrate_srcs(srcs, substrate): \"\"\"Rewrite src filenames for the given", "BUILD files. 
Otherwise the OSS export # rewrite process becomes", "= \"{}.numpy\".format(name), substrate = \"numpy\", deps = [\":{}.numpy.raw\".format(name)], testonly =", "for the given substrate.\"\"\" dep_to_check = dep if dep.startswith(\":\"): dep_to_check", "files. Otherwise the OSS export # rewrite process becomes irreversible.", "srcs_version, python_version = \"PY3\", timeout = timeout, shard_count = shard_count,", "in deps] backend_dep = \"//tensorflow_probability/python/internal/backend/{}\".format(substrate) if backend_dep not in new_deps:", "{}) $(SRCS) > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) py3_test( name", "`\"PY3\"`) if not specified. \"\"\" kwargs = dict(kwargs) if \"srcs_version\"", "symlinks. A custom build rule which adds runfiles symlinks for", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "new_deps.append(backend_dep) return new_deps # This is needed for the transitional", "\"numpy\", deps = [\":{}.numpy.raw\".format(name)], testonly = testonly, ) resolved_omit_deps_jax =", "substrates_omit_deps and dep not in remove_deps)] resolved_omit_deps_numpy = [ _resolve_omit_dep(dep)", "dep: return \"{}.{}\".format(dep, substrate) return \"{}:{}.{}\".format(dep, dep.split(\"/\")[-1], substrate) return dep", "if those libraries are not rewritten for the substrates. jax_omit_deps:", "dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ]) for dep in ctx.attr.deps], ) runfiles =", "= [depset(transitive = [ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ]) for dep in", "= jax_srcs, cmd = \"$(location {}) $(SRCS) --numpy_to_jax > $@\".format(REWRITER_TARGET),", "name = \"{}.jax\".format(name), size = jax_size or size, srcs =", "are added (with value `\"PY3\"`) if not specified. 
\"\"\" kwargs", "under the Apache License, Version 2.0 (the \"License\"); # you", "dep.split(\"/\")[-1]) if dep.startswith(\":\"): dep = \"{}{}\".format(native.package_name(), dep) return dep def", "numpy_size: A size override for the numpy target. srcs: As", "tfp/substrates/jax. substrate_runfiles_symlinks( name = \"{}.jax\".format(name), substrate = \"jax\", deps =", "srcs, main = \"{}.py\".format(name), deps = deps, tags = tags,", "kwargs[\"python_version\"] = \"PY3\" native.py_test(*args, **kwargs) def _resolve_omit_dep(dep): \"\"\"Resolves a `substrates_omit_deps`", "or dep.startswith(\":\"): if \"internal/backend\" in dep: return dep if \":\"", "= srcs_version, testonly = testonly, ) # Add symlinks under", "inputs to resolve transitive dependencies. transitive_sources = [] uses_shared_libraries =", "dep[DefaultInfo].default_runfiles.files, ]) for dep in ctx.attr.deps], ) runfiles = DefaultInfo(runfiles", "libraries & tests.\"\"\" # [internal] load python3.bzl NO_REWRITE_NEEDED = [", "dep) for no_rewrite in NO_REWRITE_NEEDED: if no_rewrite in dep_to_check: return", "_substrate_srcs(srcs, \"numpy\") native.genrule( name = \"rewrite_{}_numpy\".format(name), srcs = srcs, outs", "CcInfo in dep: cc_infos.append(dep[CcInfo]) # Determine the set of symlinks", "_substrate_srcs(srcs, \"jax\") native.genrule( name = \"rewrite_{}_jax\".format(name), srcs = srcs, outs", "and JAX gets a `'tfp_jax'` tag. A `f'_{name}'` tag is", "[ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ]) for dep in ctx.attr.deps], ) runfiles", "= size, srcs = srcs, main = \"{}.py\".format(name), deps =", "dep: dep = \"{}:{}\".format(dep, dep.split(\"/\")[-1]) if dep.startswith(\":\"): dep = \"{}{}\".format(native.package_name(),", "which substrate this applies to. 
- deps: A list of", "\"internal:docstring_util\", \"internal:reparameterization\", \"layers\", \"platform_google\", ] REWRITER_TARGET = \"//tensorflow_probability/substrates/meta:rewrite\" RUNFILES_ROOT =", "to resolve transitive dependencies. transitive_sources = [] uses_shared_libraries = []", "`'tfp_jax'` tag. A `f'_{name}'` tag is used to produce the", "numpy_omit_deps ] for src in srcs: native.genrule( name = \"rewrite_{}_numpy\".format(src.replace(\".\",", "These will have a `genrule` emitted to rewrite NumPy and", "tools = [REWRITER_TARGET], ) native.py_library( name = \"{}.numpy.raw\".format(name), srcs =", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "to omit if those libraries are not rewritten for the", "tools = [REWRITER_TARGET], ) native.py_library( name = \"{}.jax.raw\".format(name), srcs =", "name, size = \"small\", jax_size = None, numpy_size = None,", "= \"{}.jax\".format(name), substrate = \"jax\", deps = [\":{}.jax.raw\".format(name)], testonly =", "\"//third_party/py/tensorflow\", \"//third_party/py/tensorflow:tensorflow\", ] trimmed_deps = [dep for dep in deps", "= jax_size or size, srcs = jax_srcs, main = _substrate_src(\"{}.py\".format(name),", "tags + [\"tfp_numpy\"] + numpy_tags, srcs_version = srcs_version, python_version =", "= [t for t in tags] tags.append(name_tag) tags.append(\"multi_substrate\") native.py_test( name", "= f.short_path.split(\"/python/\") out_path = \"{}/substrates/{}/{}\".format( pre, substrate, post.replace(file_substr, \"\"), )", "srcs_version: As with `py_test`. timeout: As with `py_test`. shard_count: As", "JAX libraries have '.numpy' and '.jax' appended. srcs: As with", "has_py2_only_sources = any(has_py2_only_sources), has_py3_only_sources = any(has_py3_only_sources), ) py_cc_link_info = cc_common.merge_cc_infos(cc_infos", "*args: Passed to underlying py_test. 
**kwargs: Passed to underlying py_test.", "which adds runfiles symlinks for files matching a substrate genrule", "deps = deps, tags = tags, srcs_version = srcs_version, timeout", "= \"PY3\" native.py_test(*args, **kwargs) def _resolve_omit_dep(dep): \"\"\"Resolves a `substrates_omit_deps` item", "--numpy_to_jax > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_jax), ), tools = [REWRITER_TARGET], )", "[ _resolve_omit_dep(dep) for dep in substrates_omit_deps + numpy_omit_deps ] for", "shard_count, ) native.test_suite( name = name, tags = [name_tag], )", "any(has_py3_only_sources), ) py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos) py_runfiles = depset(", "return [_substrate_src(src, substrate) for src in srcs] def _substrate_dep(dep, substrate):", "[], srcs_version = \"PY2AND3\", timeout = None, shard_count = None):", "multi_substrate_py_test( name, size = \"small\", jax_size = None, numpy_size =", "remove_deps)] resolved_omit_deps_numpy = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps +", "in dep: return dep if \":\" in dep: return \"{}.{}\".format(dep,", "numpy_srcs, cmd = \"$(location {}) $(SRCS) > $@\".format(REWRITER_TARGET), tools =", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "under tfp/substrates/numpy. substrate_runfiles_symlinks( name = \"{}.numpy\".format(name), substrate = \"numpy\", deps", "cmd = \"$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax > $@\".format( REWRITER_TARGET,", "= \"{}.jax\".format(name), size = jax_size or size, srcs = jax_srcs,", "f.short_path.split(\"/python/\") out_path = \"{}/substrates/{}/{}\".format( pre, substrate, post.replace(file_substr, \"\"), ) runfiles_dict[RUNFILES_ROOT", "the JAX substrate. numpy_omit_deps: List of deps to omit for", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "(e.g. `\"notap\"`). jax_tags: Tags specific to the JAX test. 
(e.g.", "srcs_version, testonly = testonly, ) # Add symlinks under tfp/substrates/numpy.", "the OSS export # rewrite process becomes irreversible. def py3_test(*args,", "This is needed for the transitional period during which we", "given substrate.\"\"\" new_deps = [_substrate_dep(dep, substrate) for dep in deps]", "[_substrate_src(src, \"numpy\")], cmd = \"$(location {}) $(SRCS) --omit_deps={} > $@\".format(", "substrate variants. substrates_omit_deps: List of deps to omit if those", "py_info, py_cc_link_info, runfiles # See documentation at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks", "to generate python runfiles symlinks. A custom build rule which", "in substrates_omit_deps + numpy_omit_deps ] for src in srcs: native.genrule(", "\"srcs_version\" not in kwargs: kwargs[\"srcs_version\"] = \"PY3\" if \"python_version\" not", "vs py2+3 tests. Args: *args: Passed to underlying py_test. **kwargs:", "dependencies. transitive_sources = [] uses_shared_libraries = [] imports = []", "for dep in substrates_omit_deps + numpy_omit_deps ] for src in", "substrate, post.replace(file_substr, \"\"), ) runfiles_dict[RUNFILES_ROOT + out_path] = f #", "omit for the NumPy substrate. testonly: As with `py_library`. 
srcs_version:", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "any(uses_shared_libraries), imports = depset(transitive = imports), has_py2_only_sources = any(has_py2_only_sources), has_py3_only_sources", "has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if PyCcLinkParamsProvider in dep: # DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info)", "============================================================================ \"\"\"Build defs for TF/NumPy/JAX-variadic libraries & tests.\"\"\" # [internal]", "= imports), has_py2_only_sources = any(has_py2_only_sources), has_py3_only_sources = any(has_py3_only_sources), ) py_cc_link_info", "for the given substrate.\"\"\" return [_substrate_src(src, substrate) for src in", "size = \"small\", jax_size = None, numpy_size = None, srcs", "[_substrate_src(src, \"jax\")], cmd = \"$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax >", "= numpy_srcs, main = _substrate_src(\"{}.py\".format(name), \"numpy\"), deps = _substrate_deps(deps, \"numpy\"),", "if \"tensorflow_probability\" in f.dirname and file_substr in f.short_path: pre, post", "if \":\" not in dep: dep = \"{}:{}\".format(dep, dep.split(\"/\")[-1]) if", "Add symlinks under tfp/substrates/jax. substrate_runfiles_symlinks( name = \"{}.jax\".format(name), substrate =", "= [], deps = [], substrates_omit_deps = [], jax_omit_deps =", "analysis context. Returns: Info objects to propagate deps and add", "= _substrate_srcs(srcs, \"numpy\"), deps = _substrate_deps(trimmed_deps, \"numpy\"), srcs_version = srcs_version,", "jax_size or size, srcs = jax_srcs, main = _substrate_src(\"{}.py\".format(name), \"jax\"),", "[] has_py2_only_sources = [] has_py3_only_sources = [] cc_infos = []", "single src filename for the given substrate.\"\"\" return \"_{}/_generated_{}\".format(substrate, src)", "for substrate variants. tags: Tags global to this test target.", "this test target. 
NumPy also gets a `'tfp_numpy'` tag, and", "the test file into a subdirectory. deps: As with `py_test`.", "to underlying py_test. **kwargs: Passed to underlying py_test. srcs_version and", "f.dirname and file_substr in f.short_path: pre, post = f.short_path.split(\"/python/\") out_path", "As with `py_test`. The list is rewritten to depend on", "JAX variants of the test. Each substrate will have a", "py_cc_link_info, runfiles # See documentation at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks =", "in srcs] def _substrate_dep(dep, substrate): \"\"\"Convert a single dep to", "\"\"\"A custom BUILD rule to generate python runfiles symlinks. A", "native.py_library( name = \"{}.jax.raw\".format(name), srcs = jax_srcs, deps = _substrate_deps(trimmed_deps,", "tags = tags + [\"tfp_jax\"] + jax_tags, srcs_version = srcs_version,", "= PyInfo( transitive_sources = transitive_sources, uses_shared_libraries = any(uses_shared_libraries), imports =", "\"{}/substrates/{}/{}\".format( pre, substrate, post.replace(file_substr, \"\"), ) runfiles_dict[RUNFILES_ROOT + out_path] =", "to full target.\"\"\" if \":\" not in dep: dep =", "to depend on substrate-specific libraries for substrate variants. substrates_omit_deps: List", "jax_omit_deps: List of deps to omit for the JAX substrate.", "'.jax' as appropriate. size: As with `py_test`. jax_size: A size", "as appropriate. size: As with `py_test`. jax_size: A size override", "use this file except in compliance with the License. 
#", "cmd = \"$(location {}) $(SRCS) --numpy_to_jax > $@\".format(REWRITER_TARGET), tools =", "transitive_sources, uses_shared_libraries = any(uses_shared_libraries), imports = depset(transitive = imports), has_py2_only_sources", "None, numpy_size = None, srcs = [], deps = [],", "imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if PyCcLinkParamsProvider in dep: # DisableOnExport", "= [src], outs = [_substrate_src(src, \"jax\")], cmd = \"$(location {})", "rewritten for the substrates. jax_omit_deps: List of deps to omit", "Returns: Info objects to propagate deps and add runfiles symlinks.", "substrates. deps: As with `py_library`. The list is rewritten to", "i.e. `'_jax/_generated_normal.py'`. This rule will aggregate and pass along deps", "rewrite process becomes irreversible. def py3_test(*args, **kwargs): \"\"\"Internal/external reversibility, denotes", "used to rewrite srcs for NumPy and JAX substrates. deps:", "size, srcs = jax_srcs, main = _substrate_src(\"{}.py\".format(name), \"jax\"), deps =", "dep = \"{}{}\".format(native.package_name(), dep) return dep def _substrate_runfiles_symlinks_impl(ctx): \"\"\"A custom", "substrate): \"\"\"Rewrite src filenames for the given substrate.\"\"\" return [_substrate_src(src,", "gets a `'tfp_numpy'` tag, and JAX gets a `'tfp_jax'` tag.", "to. - deps: A list of py_library labels. These are", "for dep in deps] backend_dep = \"//tensorflow_probability/python/internal/backend/{}\".format(substrate) if backend_dep not", "during which we have the internal # py2and3_test and py_test", "\"\"), ) runfiles_dict[RUNFILES_ROOT + out_path] = f # Construct the", "'jax' or 'numpy'; which substrate this applies to. - deps:", "in compliance with the License. 
# You may obtain a", "software # distributed under the License is distributed on an", "remove_deps = [ \"//third_party/py/tensorflow\", \"//third_party/py/tensorflow:tensorflow\", ] trimmed_deps = [dep for", "TensorFlow Probability Authors. # # Licensed under the Apache License,", "pass along deps while adding the given symlinks to the", "to the runfiles structure. Build rule attributes: - substrate: One", "for each of TF, NumPy, and JAX. Args: name: Name", "dep in substrates_omit_deps + jax_omit_deps ] jax_srcs = _substrate_srcs(srcs, \"jax\")", "rule to generate python runfiles symlinks. A custom build rule", "produce the `test_suite`. numpy_tags: Tags specific to the NumPy test.", "deps = [\":{}.numpy.raw\".format(name)], testonly = testonly, ) resolved_omit_deps_jax = [", "return \"{}:{}.{}\".format(dep, dep.split(\"/\")[-1], substrate) return dep def _substrate_deps(deps, substrate): \"\"\"Convert", "for dep in deps if (dep not in substrates_omit_deps and", "build dep py3_test( name = \"{}.jax\".format(name), size = jax_size or", "`py_test`. These will have a `genrule` emitted to rewrite NumPy", "DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport if CcInfo in dep: cc_infos.append(dep[CcInfo])", "= [], numpy_tags = [], jax_tags = [], disabled_substrates =", "= rule( implementation = _substrate_runfiles_symlinks_impl, attrs = { \"substrate\": attr.string(),", "\"{}.jax.raw\".format(name), srcs = jax_srcs, deps = _substrate_deps(trimmed_deps, \"jax\"), srcs_version =", "if CcInfo in dep: cc_infos.append(dep[CcInfo]) # Determine the set of", "The TF `py_library` name. NumPy and JAX libraries have '.numpy'", "[\":{}.jax.raw\".format(name)], testonly = testonly, ) def multi_substrate_py_test( name, size =", "# [internal] Add JAX build dep py3_test( name = \"{}.jax\".format(name),", "runfiles_dict[RUNFILES_ROOT + out_path] = f # Construct the output structures", "and add runfiles symlinks. 
\"\"\" # Aggregate the depset inputs", "the numpy target. srcs: As with `py_test`. These will have", "= jax_deps, tags = tags + [\"tfp_jax\"] + jax_tags, srcs_version", "`py_library` name. NumPy and JAX libraries have '.numpy' and '.jax'", "TFP `py2and3_test` for each of TF, NumPy, and JAX. Args:", "for the JAX substrate. numpy_omit_deps: List of deps to omit", "with the License. # You may obtain a copy of", "jax_srcs = _substrate_srcs(srcs, \"jax\") native.genrule( name = \"rewrite_{}_jax\".format(name), srcs =", "= tags + [\"tfp_numpy\"] + numpy_tags, srcs_version = srcs_version, python_version", "`py_library`. The list is rewritten to depend on substrate-specific libraries", "srcs_version = \"PY2AND3\"): \"\"\"A TFP `py_library` for each of TF,", "shard_count = shard_count, ) native.test_suite( name = name, tags =", "not in kwargs: kwargs[\"python_version\"] = \"PY3\" native.py_test(*args, **kwargs) def _resolve_omit_dep(dep):", "those appropriate for the given substrate.\"\"\" new_deps = [_substrate_dep(dep, substrate)", "runfiles = DefaultInfo(runfiles = ctx.runfiles( transitive_files = py_runfiles, root_symlinks =", "the given symlinks to the runfiles structure. Build rule attributes:", "= testonly, ) resolved_omit_deps_jax = [ _resolve_omit_dep(dep) for dep in", "from [\"numpy\", \"jax\"]. srcs_version: As with `py_test`. timeout: As with", "timeout = timeout, shard_count = shard_count, ) if \"jax\" not", "[], deps = [], substrates_omit_deps = [], jax_omit_deps = [],", "dep_to_check = \"{}{}\".format(native.package_name(), dep) for no_rewrite in NO_REWRITE_NEEDED: if no_rewrite", "shard_count, ) if \"numpy\" not in disabled_substrates: numpy_srcs = _substrate_srcs(srcs,", "def _substrate_src(src, substrate): \"\"\"Rewrite a single src filename for the", "'.tf', '.numpy', or '.jax' as appropriate. size: As with `py_test`.", "express or implied. # See the License for the specific", "size override for the JAX target. 
numpy_size: A size override", "] REWRITER_TARGET = \"//tensorflow_probability/substrates/meta:rewrite\" RUNFILES_ROOT = \"tensorflow_probability/\" def _substrate_src(src, substrate):", "except in compliance with the License. # You may obtain", "= dep if dep.startswith(\":\"): dep_to_check = \"{}{}\".format(native.package_name(), dep) for no_rewrite", "while adding the given symlinks to the runfiles structure. Build", "outs = numpy_srcs, cmd = \"$(location {}) $(SRCS) > $@\".format(REWRITER_TARGET),", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "_resolve_omit_dep(dep) for dep in substrates_omit_deps + numpy_omit_deps ] for src", "= \"//tensorflow_probability/substrates/meta:rewrite\" RUNFILES_ROOT = \"tensorflow_probability/\" def _substrate_src(src, substrate): \"\"\"Rewrite a", "along Python srcs/deps/etc. py_info = PyInfo( transitive_sources = transitive_sources, uses_shared_libraries", "# Construct the output structures to pass along Python srcs/deps/etc.", "deps = jax_deps, tags = tags + [\"tfp_jax\"] + jax_tags,", "= \"rewrite_{}_numpy\".format(name), srcs = srcs, outs = numpy_srcs, cmd =", "is needed for the transitional period during which we have", "tools = [REWRITER_TARGET], ) py3_test( name = \"{}.numpy\".format(name), size =", "A `genrule` is used to rewrite srcs for NumPy and", "\"rewrite_{}_jax\".format(name), srcs = srcs, outs = jax_srcs, cmd = \"$(location", "substrate = \"numpy\", deps = [\":{}.numpy.raw\".format(name)], testonly = testonly, )", "CONDITIONS OF ANY KIND, either express or implied. # See", "of 'jax' or 'numpy'; which substrate this applies to. 
-", "[], numpy_tags = [], jax_tags = [], disabled_substrates = [],", "\"{}.numpy.raw\".format(name), srcs = _substrate_srcs(srcs, \"numpy\"), deps = _substrate_deps(trimmed_deps, \"numpy\"), srcs_version", "srcs = srcs, deps = deps, srcs_version = srcs_version, testonly", "name = \"{}.numpy.raw\".format(name), srcs = _substrate_srcs(srcs, \"numpy\"), deps = _substrate_deps(trimmed_deps,", "\"layers\", \"platform_google\", ] REWRITER_TARGET = \"//tensorflow_probability/substrates/meta:rewrite\" RUNFILES_ROOT = \"tensorflow_probability/\" def", "name, srcs = srcs, deps = deps, srcs_version = srcs_version,", "shard_count = None): \"\"\"A TFP `py2and3_test` for each of TF,", "None, srcs = [], deps = [], tags = [],", "\"//tensorflow_probability/substrates/meta:rewrite\" RUNFILES_ROOT = \"tensorflow_probability/\" def _substrate_src(src, substrate): \"\"\"Rewrite a single", "out_path] = f # Construct the output structures to pass", "in dep_to_check: return dep if \"tensorflow_probability/\" in dep or dep.startswith(\":\"):", "= jax_srcs, main = _substrate_src(\"{}.py\".format(name), \"jax\"), deps = jax_deps, tags", "dep.startswith(\":\"): dep = \"{}{}\".format(native.package_name(), dep) return dep def _substrate_runfiles_symlinks_impl(ctx): \"\"\"A", "of deps to omit if those libraries are not rewritten", "A size override for the numpy target. srcs: As with", "**kwargs: Passed to underlying py_test. 
srcs_version and python_version are added", "= \"PY3\" if \"python_version\" not in kwargs: kwargs[\"python_version\"] = \"PY3\"", "dep def _substrate_runfiles_symlinks_impl(ctx): \"\"\"A custom BUILD rule to generate python", "transitive_sources = transitive_sources, uses_shared_libraries = any(uses_shared_libraries), imports = depset(transitive =", "{} substrate = ctx.attr.substrate file_substr = \"_{}/_generated_\".format(substrate) for f in", "transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if PyCcLinkParamsProvider in dep:", "Passed to underlying py_test. **kwargs: Passed to underlying py_test. srcs_version", "tags.append(\"multi_substrate\") native.py_test( name = \"{}.tf\".format(name), size = size, srcs =", "TF, NumPy, and JAX. Args: name: Name of the `test_suite`", "= [ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ]) for dep in ctx.attr.deps], )", "As with `py_library`. srcs_version: As with `py_library`. \"\"\" native.py_library( name", "symlinks. \"\"\" # Aggregate the depset inputs to resolve transitive", "backend_dep not in new_deps: new_deps.append(backend_dep) return new_deps # This is", "name = \"rewrite_{}_numpy\".format(src.replace(\".\", \"_\")), srcs = [src], outs = [_substrate_src(src,", "for the numpy target. srcs: As with `py_test`. These will", "native.py_test( name = \"{}.tf\".format(name), size = size, srcs = srcs,", "A `f'_{name}'` tag is used to produce the `test_suite`. numpy_tags:", "numpy_size or size, srcs = numpy_srcs, main = _substrate_src(\"{}.py\".format(name), \"numpy\"),", "substrate this applies to. - deps: A list of py_library", "depend on substrate-specific libraries for substrate variants. 
substrates_omit_deps: List of", "List of deps to omit for the JAX substrate. numpy_omit_deps:", "= name, srcs = srcs, deps = deps, srcs_version =", "the License. # ============================================================================ \"\"\"Build defs for TF/NumPy/JAX-variadic libraries &", "\"\"\"Convert deps to those appropriate for the given substrate.\"\"\" new_deps", "return dep if \"tensorflow_probability/\" in dep or dep.startswith(\":\"): if \"internal/backend\"", "jax_deps = _substrate_deps(deps, \"jax\") # [internal] Add JAX build dep", "= [ \"//third_party/py/tensorflow\", \"//third_party/py/tensorflow:tensorflow\", ] trimmed_deps = [dep for dep", "jax_deps, tags = tags + [\"tfp_jax\"] + jax_tags, srcs_version =", "[], jax_omit_deps = [], numpy_omit_deps = [], testonly = 0,", "name, srcs = [], deps = [], substrates_omit_deps = [],", "deps = [], tags = [], numpy_tags = [], jax_tags", "to depend on substrate-specific libraries for substrate variants. tags: Tags", "substrate) for dep in deps] backend_dep = \"//tensorflow_probability/python/internal/backend/{}\".format(substrate) if backend_dep", "appropriate for the given substrate.\"\"\" dep_to_check = dep if dep.startswith(\":\"):", "+ numpy_tags, srcs_version = srcs_version, python_version = \"PY3\", timeout =", "\"//tensorflow_probability/python/internal/backend/{}\".format(substrate) if backend_dep not in new_deps: new_deps.append(backend_dep) return new_deps #", "dep.split(\"/\")[-1], substrate) return dep def _substrate_deps(deps, substrate): \"\"\"Convert deps to", "python runfiles symlinks. 
A custom build rule which adds runfiles", "= \"{}{}\".format(native.package_name(), dep) return dep def _substrate_runfiles_symlinks_impl(ctx): \"\"\"A custom BUILD", "One of 'jax' or 'numpy'; which substrate this applies to.", "imports), has_py2_only_sources = any(has_py2_only_sources), has_py3_only_sources = any(has_py3_only_sources), ) py_cc_link_info =", "\"{}.numpy\".format(name), size = numpy_size or size, srcs = numpy_srcs, main", "and dep not in remove_deps)] resolved_omit_deps_numpy = [ _resolve_omit_dep(dep) for", "on substrate-specific libraries for substrate variants. substrates_omit_deps: List of deps", "def _substrate_runfiles_symlinks_impl(ctx): \"\"\"A custom BUILD rule to generate python runfiles", "NumPy and JAX substrates. deps: As with `py_library`. The list", "= ctx.attr.substrate file_substr = \"_{}/_generated_\".format(substrate) for f in transitive_sources.to_list(): if", "transitive_sources.to_list(): if \"tensorflow_probability\" in f.dirname and file_substr in f.short_path: pre,", "a `genrule` emitted to rewrite NumPy and JAX variants, writing", "for f in transitive_sources.to_list(): if \"tensorflow_probability\" in f.dirname and file_substr", "= [] for dep in ctx.attr.deps: if PyInfo in dep:", "= testonly, ) # Add symlinks under tfp/substrates/numpy. substrate_runfiles_symlinks( name", "0, srcs_version = \"PY2AND3\"): \"\"\"A TFP `py_library` for each of", "\"tensorflow_probability/\" in dep or dep.startswith(\":\"): if \"internal/backend\" in dep: return", "JAX. Args: name: The TF `py_library` name. NumPy and JAX", "Construct the output structures to pass along Python srcs/deps/etc. py_info", "deps = [\":{}.jax.raw\".format(name)], testonly = testonly, ) def multi_substrate_py_test( name,", "= _substrate_src(\"{}.py\".format(name), \"jax\"), deps = jax_deps, tags = tags +", "testonly: As with `py_library`. srcs_version: As with `py_library`. 
\"\"\" native.py_library(", "outs = jax_srcs, cmd = \"$(location {}) $(SRCS) --numpy_to_jax >", "applies to. - deps: A list of py_library labels. These", "substrate = ctx.attr.substrate file_substr = \"_{}/_generated_\".format(substrate) for f in transitive_sources.to_list():", "and py_test comingling in BUILD files. Otherwise the OSS export", ") # Add symlinks under tfp/substrates/numpy. substrate_runfiles_symlinks( name = \"{}.numpy\".format(name),", "documentation at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks = rule( implementation = _substrate_runfiles_symlinks_impl,", "[t for t in tags] tags.append(name_tag) tags.append(\"multi_substrate\") native.py_test( name =", "to rewrite srcs for NumPy and JAX substrates. deps: As", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "), tools = [REWRITER_TARGET], ) native.py_library( name = \"{}.numpy.raw\".format(name), srcs", "tfp/substrates/numpy. substrate_runfiles_symlinks( name = \"{}.numpy\".format(name), substrate = \"numpy\", deps =", "the internal # py2and3_test and py_test comingling in BUILD files.", "= timeout, shard_count = shard_count, ) native.test_suite( name = name,", "load python3.bzl NO_REWRITE_NEEDED = [ \"internal:all_util\", \"internal:docstring_util\", \"internal:reparameterization\", \"layers\", \"platform_google\",", "TF/NumPy/JAX-variadic libraries & tests.\"\"\" # [internal] load python3.bzl NO_REWRITE_NEEDED =", "srcs, outs = jax_srcs, cmd = \"$(location {}) $(SRCS) --numpy_to_jax", "REWRITER_TARGET = \"//tensorflow_probability/substrates/meta:rewrite\" RUNFILES_ROOT = \"tensorflow_probability/\" def _substrate_src(src, substrate): \"\"\"Rewrite", ") jax_deps = _substrate_deps(deps, \"jax\") # [internal] Add JAX build", "= timeout, shard_count = shard_count, ) if \"jax\" not in", "None, shard_count = None): \"\"\"A TFP `py2and3_test` for each of", "root_symlinks = runfiles_dict, )) return py_info, py_cc_link_info, 
runfiles # See", "JAX test. (e.g. `\"notap\"`). disabled_substrates: Iterable of substrates to disable,", "and JAX variants, writing the test file into a subdirectory.", "variants of the test. Each substrate will have a dedicated", "NumPy test. (e.g. `\"notap\"`). jax_tags: Tags specific to the JAX", "As with `py_library`. \"\"\" native.py_library( name = name, srcs =", "Version 2.0 (the \"License\"); # you may not use this", "`genrule` is used to rewrite srcs for NumPy and JAX", "the depset inputs to resolve transitive dependencies. transitive_sources = []", "[], disabled_substrates = [], srcs_version = \"PY2AND3\", timeout = None,", "= [] uses_shared_libraries = [] imports = [] has_py2_only_sources =", "+ [\"tfp_numpy\"] + numpy_tags, srcs_version = srcs_version, python_version = \"PY3\",", "# Add symlinks under tfp/substrates/jax. substrate_runfiles_symlinks( name = \"{}.jax\".format(name), substrate", "\"jax\"]. srcs_version: As with `py_test`. timeout: As with `py_test`. shard_count:", "\"PY2AND3\", timeout = None, shard_count = None): \"\"\"A TFP `py2and3_test`", "--omit_deps={} --numpy_to_jax > $@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_jax), ), tools = [REWRITER_TARGET],", "new_deps # This is needed for the transitional period during", "by applicable law or agreed to in writing, software #", "or 'numpy'; which substrate this applies to. - deps: A", "[src], outs = [_substrate_src(src, \"numpy\")], cmd = \"$(location {}) $(SRCS)", "will have a `genrule` emitted to rewrite NumPy and JAX", "jax_omit_deps = [], numpy_omit_deps = [], testonly = 0, srcs_version", "with `py_library`. A `genrule` is used to rewrite srcs for", "testonly = testonly, ) def multi_substrate_py_test( name, size = \"small\",", "pattern, i.e. `'_jax/_generated_normal.py'`. 
This rule will aggregate and pass along", "disabled_substrates: Iterable of substrates to disable, items from [\"numpy\", \"jax\"].", "= [REWRITER_TARGET], ) py3_test( name = \"{}.numpy\".format(name), size = numpy_size", "ctx.runfiles( transitive_files = py_runfiles, root_symlinks = runfiles_dict, )) return py_info,", "if \"numpy\" not in disabled_substrates: numpy_srcs = _substrate_srcs(srcs, \"numpy\") native.genrule(", "substrate) return dep def _substrate_deps(deps, substrate): \"\"\"Convert deps to those", "= None, shard_count = None): \"\"\"A TFP `py2and3_test` for each", "- deps: A list of py_library labels. These are passed", "= None): \"\"\"A TFP `py2and3_test` for each of TF, NumPy,", "if \"python_version\" not in kwargs: kwargs[\"python_version\"] = \"PY3\" native.py_test(*args, **kwargs)", "applicable law or agreed to in writing, software # distributed", "_substrate_src(src, substrate): \"\"\"Rewrite a single src filename for the given", "runfiles symlinks. A custom build rule which adds runfiles symlinks", "numpy target. srcs: As with `py_test`. These will have a", "custom build rule which adds runfiles symlinks for files matching", "in substrates_omit_deps and dep not in remove_deps)] resolved_omit_deps_numpy = [", "backend_dep = \"//tensorflow_probability/python/internal/backend/{}\".format(substrate) if backend_dep not in new_deps: new_deps.append(backend_dep) return", "Passed to underlying py_test. srcs_version and python_version are added (with", "= srcs, main = \"{}.py\".format(name), deps = deps, tags =", "<filename>tensorflow_probability/python/build_defs.bzl # Copyright 2019 The TensorFlow Probability Authors. # #", "and python_version are added (with value `\"PY3\"`) if not specified.", "substrate-specific libraries for substrate variants. substrates_omit_deps: List of deps to", "Authors. 
# # Licensed under the Apache License, Version 2.0", "testonly = 0, srcs_version = \"PY2AND3\"): \"\"\"A TFP `py_library` for", "for files matching a substrate genrule file pattern, i.e. `'_jax/_generated_normal.py'`.", "srcs: native.genrule( name = \"rewrite_{}_jax\".format(src.replace(\".\", \"_\")), srcs = [src], outs", "\"_{}/_generated_\".format(substrate) for f in transitive_sources.to_list(): if \"tensorflow_probability\" in f.dirname and", "# You may obtain a copy of the License at", "PyCcLinkParamsProvider in dep: # DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport if", "dep_to_check: return dep if \"tensorflow_probability/\" in dep or dep.startswith(\":\"): if", "deps = _substrate_deps(trimmed_deps, \"numpy\"), srcs_version = srcs_version, testonly = testonly,", "a substrate genrule file pattern, i.e. `'_jax/_generated_normal.py'`. This rule will", "= runfiles_dict, )) return py_info, py_cc_link_info, runfiles # See documentation", "with `py_test`. jax_size: A size override for the JAX target.", "[\"numpy\", \"jax\"]. srcs_version: As with `py_test`. timeout: As with `py_test`.", "dep in substrates_omit_deps + numpy_omit_deps ] for src in srcs:", "= [] has_py2_only_sources = [] has_py3_only_sources = [] cc_infos =", "\"{}{}\".format(native.package_name(), dep) for no_rewrite in NO_REWRITE_NEEDED: if no_rewrite in dep_to_check:", "srcs = srcs, main = \"{}.py\".format(name), deps = deps, tags", "given symlinks to the runfiles structure. Build rule attributes: -", "> $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) py3_test( name = \"{}.numpy\".format(name),", "value `\"PY3\"`) if not specified. 
\"\"\" kwargs = dict(kwargs) if", "appropriate for the given substrate.\"\"\" new_deps = [_substrate_dep(dep, substrate) for", "dep or dep.startswith(\":\"): if \"internal/backend\" in dep: return dep if", "kwargs = dict(kwargs) if \"srcs_version\" not in kwargs: kwargs[\"srcs_version\"] =", "# DisableOnExport if CcInfo in dep: cc_infos.append(dep[CcInfo]) # Determine the", "to generate. transitive_sources = depset(transitive = transitive_sources) runfiles_dict = {}", "# DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport if CcInfo in dep:", "\"PY3\", timeout = timeout, shard_count = shard_count, ) if \"jax\"", "$(SRCS) > $@\".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) py3_test( name =", "# https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks = rule( implementation = _substrate_runfiles_symlinks_impl, attrs =", "Name of the `test_suite` which covers TF, NumPy and JAX", "not in disabled_substrates: numpy_srcs = _substrate_srcs(srcs, \"numpy\") native.genrule( name =", "https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks = rule( implementation = _substrate_runfiles_symlinks_impl, attrs = {", "the `test_suite` which covers TF, NumPy and JAX variants of", "return dep def _substrate_deps(deps, substrate): \"\"\"Convert deps to those appropriate", "numpy_size = None, srcs = [], deps = [], tags", "the substrates. 
jax_omit_deps: List of deps to omit for the", "= shard_count, ) if \"jax\" not in disabled_substrates: jax_srcs =", "in dep: cc_infos.append(dep[CcInfo]) # Determine the set of symlinks to", "\"License\"); # you may not use this file except in", "= \"PY2AND3\", timeout = None, shard_count = None): \"\"\"A TFP", "$@\".format( REWRITER_TARGET, \",\".join(resolved_omit_deps_numpy), ), tools = [REWRITER_TARGET], ) native.py_library( name", "name = \"rewrite_{}_numpy\".format(name), srcs = srcs, outs = numpy_srcs, cmd", "jax_tags, srcs_version = srcs_version, python_version = \"PY3\", timeout = timeout,", "\"python_version\" not in kwargs: kwargs[\"python_version\"] = \"PY3\" native.py_test(*args, **kwargs) def", "runfiles symlinks for files matching a substrate genrule file pattern,", "name = name, srcs = srcs, deps = deps, srcs_version", "a subdirectory. deps: As with `py_test`. The list is rewritten", "covers TF, NumPy and JAX variants of the test. Each", "post.replace(file_substr, \"\"), ) runfiles_dict[RUNFILES_ROOT + out_path] = f # Construct", "_substrate_src(\"{}.py\".format(name), \"jax\"), deps = jax_deps, tags = tags + [\"tfp_jax\"]", "period during which we have the internal # py2and3_test and", "for dep in substrates_omit_deps + jax_omit_deps ] jax_srcs = _substrate_srcs(srcs,", "As with `py_test`. timeout: As with `py_test`. shard_count: As with", "\"internal:reparameterization\", \"layers\", \"platform_google\", ] REWRITER_TARGET = \"//tensorflow_probability/substrates/meta:rewrite\" RUNFILES_ROOT = \"tensorflow_probability/\"", "substrates. 
jax_omit_deps: List of deps to omit for the JAX", "at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks = rule( implementation = _substrate_runfiles_symlinks_impl, attrs", "ctx.attr.substrate file_substr = \"_{}/_generated_\".format(substrate) for f in transitive_sources.to_list(): if \"tensorflow_probability\"", "= \"_{}/_generated_\".format(substrate) for f in transitive_sources.to_list(): if \"tensorflow_probability\" in f.dirname", "for src in srcs] def _substrate_dep(dep, substrate): \"\"\"Convert a single" ]
[ "2.0 (the \"License\"); # you may not use this file", "from typing import Mapping, MutableSequence, Sequence, Type, TypeVar import requests", "as progress_bar_files, tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} bytes\", total=sum(dump_file.size for dump_file", "from pydantic import validator from tqdm import tqdm # type:", "_T_WikidataDumpFile = TypeVar(\"_T_WikidataDumpFile\", bound=WikidataDumpFile) class WikidataDump: def __init__( self, dump_dir:", "WikidataDumpPagesMetaHistory, ) from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable _LOGGER = getLogger(__name__) _T_WikidataDumpFile", "\"sitestable\" )[0] self.pages_meta_history: Final = RangeMap[WikidataDumpPagesMetaHistory]() for dump_file in self._construct_dumps(", "-> datetime: # noqa: N805 return datetime.strptime(value, \"%Y-%m-%d %H:%M:%S\") class", "dump_file def download( self, *, sites_table: bool = True, pages_meta_history:", "not path.exists(): url = f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\" _LOGGER.debug(f\"Downloading Wikidata dump status from", "None: _LOGGER.info( f\"Downloading Wikidata dump {self.version:%4Y%2m%2d} from '{self.mirror}'.\" ) dump_files:", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "{self.version:%4Y%2m%2d} files\", total=len(dump_files), dynamic_ncols=True, position=1, ) as progress_bar_files, tqdm( desc=f\"Wikidata", "self.mirror: Final = mirror self._dump_status = _WikidataDumpStatus.load( self._dump_dir, self.version, self.mirror", "as progress_bar_size: for dump_file in dump_files: dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size) _LOGGER.info(f\"Done", "= _WikidataDumpStatus.parse_file(path) for job_name, job in dump_status.jobs.items(): if job.status !=", "total=len(dump_files), dynamic_ncols=True, position=1, ) as progress_bar_files, tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d}", "raise Exception(f\"Job '{job_name}' is not 'done', 
but '{job.status}'.\") return dump_status", "BaseModel as PydanticModel from pydantic import validator from tqdm import", "_construct_dumps( self, dump_type: Type[_T_WikidataDumpFile], dump_type_id: str ) -> Sequence[_T_WikidataDumpFile]: return", "noqa: N805 return datetime.strptime(value, \"%Y-%m-%d %H:%M:%S\") class _WikidataDumpStatus(PydanticModel): jobs: Mapping[str,", "wikidated._utils import RangeMap from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history import", "Path, version: date, mirror: str) -> _WikidataDumpStatus: path = dump_dir", "version: str @classmethod def load(cls, dump_dir: Path, version: date, mirror:", "import annotations import json from datetime import date, datetime from", "use this file except in compliance with the License. #", "# # Copyright 2021 <NAME> # # Licensed under the", "Type[_T_WikidataDumpFile], dump_type_id: str ) -> Sequence[_T_WikidataDumpFile]: return [ dump_type( path=self._dump_dir", "RangeMap from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history import ( WikidataDumpPagesMetaHistory,", "typing import Mapping, MutableSequence, Sequence, Type, TypeVar import requests from", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "_WikidataDumpStatusFile(PydanticModel): size: int url: str md5: str sha1: str class", "str class _WikidataDumpStatusJob(PydanticModel): status: str updated: datetime files: Mapping[str, _WikidataDumpStatusFile]", "position=2, unit=\"B\", unit_scale=True, unit_divisor=1024, ) as progress_bar_size: for dump_file in", "License. 
# You may obtain a copy of the License", "_WikidataDumpStatus(PydanticModel): jobs: Mapping[str, _WikidataDumpStatusJob] version: str @classmethod def load(cls, dump_dir:", "Final = version self.mirror: Final = mirror self._dump_status = _WikidataDumpStatus.load(", "Final from wikidated._utils import RangeMap from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile from", "__init__( self, dump_dir: Path, version: date, *, mirror: str =", "under the License is distributed on an \"AS IS\" BASIS,", "dump status from '{url}'.\") response = requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True)", "License for the specific language governing permissions and # limitations", "type: ignore from typing_extensions import Final from wikidated._utils import RangeMap", "path.unlink() raise Exception(f\"Job '{job_name}' is not 'done', but '{job.status}'.\") return", ") self.sites_table: Final = self._construct_dumps( WikidataDumpSitesTable, \"sitestable\" )[0] self.pages_meta_history: Final", "Wikidata dump {self.version:%4Y%2m%2d}.\") def _construct_dumps( self, dump_type: Type[_T_WikidataDumpFile], dump_type_id: str", "WikidataDumpSitesTable _LOGGER = getLogger(__name__) _T_WikidataDumpFile = TypeVar(\"_T_WikidataDumpFile\", bound=WikidataDumpFile) class WikidataDump:", "and # limitations under the License. # from __future__ import", "path=self._dump_dir / path, url=self.mirror + dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size, ) for", "= dump_file def download( self, *, sites_table: bool = True,", "dump_type( path=self._dump_dir / path, url=self.mirror + dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size, )", "-> None: self._dump_dir = dump_dir self.version: Final = version self.mirror:", "the License. 
# from __future__ import annotations import json from", "= getLogger(__name__) _T_WikidataDumpFile = TypeVar(\"_T_WikidataDumpFile\", bound=WikidataDumpFile) class WikidataDump: def __init__(", "in compliance with the License. # You may obtain a", "fd: fd.write(json.dumps(response.json(), indent=2) + \"\\n\") _LOGGER.debug(\"Done downloading Wikidata dump status.\")", "url=self.mirror + dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size, ) for path, dump_status_file in", "software # distributed under the License is distributed on an", "updated: datetime files: Mapping[str, _WikidataDumpStatusFile] @validator(\"updated\", pre=True) def _parse_datetime(cls, value:", "import Final from wikidated._utils import RangeMap from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile", "dump_file in dump_files), dynamic_ncols=True, position=2, unit=\"B\", unit_scale=True, unit_divisor=1024, ) as", "Final = self._construct_dumps( WikidataDumpSitesTable, \"sitestable\" )[0] self.pages_meta_history: Final = RangeMap[WikidataDumpPagesMetaHistory]()", "in dump_status.jobs.items(): if job.status != \"done\": path.unlink() raise Exception(f\"Job '{job_name}'", "bytes\", total=sum(dump_file.size for dump_file in dump_files), dynamic_ncols=True, position=2, unit=\"B\", unit_scale=True,", "{self.version:%4Y%2m%2d} bytes\", total=sum(dump_file.size for dump_file in dump_files), dynamic_ncols=True, position=2, unit=\"B\",", "sha1: str class _WikidataDumpStatusJob(PydanticModel): status: str updated: datetime files: Mapping[str,", "validator from tqdm import tqdm # type: ignore from typing_extensions", "= RangeMap[WikidataDumpPagesMetaHistory]() for dump_file in self._construct_dumps( WikidataDumpPagesMetaHistory, \"metahistory7zdump\" ): self.pages_meta_history[dump_file.page_ids]", "RangeMap[WikidataDumpPagesMetaHistory]() for dump_file in self._construct_dumps( WikidataDumpPagesMetaHistory, \"metahistory7zdump\" ): 
self.pages_meta_history[dump_file.page_ids] =", "tqdm import tqdm # type: ignore from typing_extensions import Final", "from __future__ import annotations import json from datetime import date,", "import Mapping, MutableSequence, Sequence, Type, TypeVar import requests from pydantic", "Path, version: date, *, mirror: str = \"https://dumps.wikimedia.org\", ) ->", "logging import getLogger from pathlib import Path from typing import", "for dump_file in self._construct_dumps( WikidataDumpPagesMetaHistory, \"metahistory7zdump\" ): self.pages_meta_history[dump_file.page_ids] = dump_file", "@validator(\"updated\", pre=True) def _parse_datetime(cls, value: str) -> datetime: # noqa:", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} bytes\", total=sum(dump_file.size for dump_file in dump_files),", "2021 <NAME> # # Licensed under the Apache License, Version", "_WikidataDumpStatusJob] version: str @classmethod def load(cls, dump_dir: Path, version: date,", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True) with path.open(\"w\", encoding=\"UTF-8\") as fd: fd.write(json.dumps(response.json(), indent=2)", "dump status.\") dump_status = _WikidataDumpStatus.parse_file(path) for job_name, job in dump_status.jobs.items():", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "for path, dump_status_file in self._dump_status.jobs[ dump_type_id ].files.items() ] class _WikidataDumpStatusFile(PydanticModel):", "to in writing, software # distributed under the License is", "getLogger from pathlib import Path from typing import Mapping, MutableSequence,", "size: int url: str md5: str sha1: str class _WikidataDumpStatusJob(PydanticModel):", "# See the License for the specific language governing permissions", "self._dump_dir = dump_dir self.version: Final = version self.mirror: Final =", "= TypeVar(\"_T_WikidataDumpFile\", bound=WikidataDumpFile) class WikidataDump: def __init__( self, dump_dir: Path,", "sites_table: dump_files.append(self.sites_table) if pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d}", "language governing permissions and # limitations under the License. #", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "path.exists(): url = f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\" _LOGGER.debug(f\"Downloading Wikidata dump status from '{url}'.\")", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "with the License. # You may obtain a copy of", "from '{self.mirror}'.\" ) dump_files: MutableSequence[WikidataDumpFile] = [] if sites_table: dump_files.append(self.sites_table)", "self.pages_meta_history[dump_file.page_ids] = dump_file def download( self, *, sites_table: bool =", "path.parent.mkdir(exist_ok=True, parents=True) with path.open(\"w\", encoding=\"UTF-8\") as fd: fd.write(json.dumps(response.json(), indent=2) +", "as PydanticModel from pydantic import validator from tqdm import tqdm", "# limitations under the License. 
# from __future__ import annotations", "in self._construct_dumps( WikidataDumpPagesMetaHistory, \"metahistory7zdump\" ): self.pages_meta_history[dump_file.page_ids] = dump_file def download(", "\"done\": path.unlink() raise Exception(f\"Job '{job_name}' is not 'done', but '{job.status}'.\")", "self, dump_dir: Path, version: date, *, mirror: str = \"https://dumps.wikimedia.org\",", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "job in dump_status.jobs.items(): if job.status != \"done\": path.unlink() raise Exception(f\"Job", "json from datetime import date, datetime from logging import getLogger", "Mapping[str, _WikidataDumpStatusJob] version: str @classmethod def load(cls, dump_dir: Path, version:", "limitations under the License. # from __future__ import annotations import", "distributed under the License is distributed on an \"AS IS\"", "version self.mirror: Final = mirror self._dump_status = _WikidataDumpStatus.load( self._dump_dir, self.version,", "_WikidataDumpStatus: path = dump_dir / f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\" if not path.exists(): url", "encoding=\"UTF-8\") as fd: fd.write(json.dumps(response.json(), indent=2) + \"\\n\") _LOGGER.debug(\"Done downloading Wikidata", "files\", total=len(dump_files), dynamic_ncols=True, position=1, ) as progress_bar_files, tqdm( desc=f\"Wikidata dump", "express or implied. # See the License for the specific", "# type: ignore from typing_extensions import Final from wikidated._utils import", "except in compliance with the License. 
# You may obtain", "= _WikidataDumpStatus.load( self._dump_dir, self.version, self.mirror ) self.sites_table: Final = self._construct_dumps(", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "sites_table: bool = True, pages_meta_history: bool = True ) ->", "pathlib import Path from typing import Mapping, MutableSequence, Sequence, Type,", "permissions and # limitations under the License. # from __future__", "<filename>src/wikidated/wikidata/wikidata_dump.py<gh_stars>1-10 # # Copyright 2021 <NAME> # # Licensed under", "writing, software # distributed under the License is distributed on", "Sequence[_T_WikidataDumpFile]: return [ dump_type( path=self._dump_dir / path, url=self.mirror + dump_status_file.url,", "True ) -> None: _LOGGER.info( f\"Downloading Wikidata dump {self.version:%4Y%2m%2d} from", "you may not use this file except in compliance with", ") for path, dump_status_file in self._dump_status.jobs[ dump_type_id ].files.items() ] class", "%H:%M:%S\") class _WikidataDumpStatus(PydanticModel): jobs: Mapping[str, _WikidataDumpStatusJob] version: str @classmethod def", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "date, *, mirror: str = \"https://dumps.wikimedia.org\", ) -> None: self._dump_dir", "dump_files.extend(self.pages_meta_history.values()) with tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} files\", total=len(dump_files), dynamic_ncols=True, position=1,", "load(cls, dump_dir: Path, version: date, mirror: str) -> _WikidataDumpStatus: path", "tqdm # type: ignore from typing_extensions import Final from wikidated._utils", "status from '{url}'.\") response = requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True) with", "import validator from tqdm import tqdm # type: ignore from", "] class _WikidataDumpStatusFile(PydanticModel): size: int url: str md5: str sha1:", ") -> None: _LOGGER.info( f\"Downloading 
Wikidata dump {self.version:%4Y%2m%2d} from '{self.mirror}'.\"", "from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable _LOGGER = getLogger(__name__) _T_WikidataDumpFile = TypeVar(\"_T_WikidataDumpFile\",", "CONDITIONS OF ANY KIND, either express or implied. # See", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "= \"https://dumps.wikimedia.org\", ) -> None: self._dump_dir = dump_dir self.version: Final", "def _construct_dumps( self, dump_type: Type[_T_WikidataDumpFile], dump_type_id: str ) -> Sequence[_T_WikidataDumpFile]:", "date, datetime from logging import getLogger from pathlib import Path", "= dump_dir / f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\" if not path.exists(): url = f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\"", "= True ) -> None: _LOGGER.info( f\"Downloading Wikidata dump {self.version:%4Y%2m%2d}", "mirror self._dump_status = _WikidataDumpStatus.load( self._dump_dir, self.version, self.mirror ) self.sites_table: Final", "dynamic_ncols=True, position=1, ) as progress_bar_files, tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} bytes\",", "wikidated.wikidata.wikidata_dump_file import WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history import ( WikidataDumpPagesMetaHistory, ) from", "None: self._dump_dir = dump_dir self.version: Final = version self.mirror: Final", "Path from typing import Mapping, MutableSequence, Sequence, Type, TypeVar import", "url: str md5: str sha1: str class _WikidataDumpStatusJob(PydanticModel): status: str", "dump_files: MutableSequence[WikidataDumpFile] = [] if sites_table: dump_files.append(self.sites_table) if pages_meta_history: dump_files.extend(self.pages_meta_history.values())", "url = f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\" _LOGGER.debug(f\"Downloading Wikidata dump status from '{url}'.\") response", "str ) -> Sequence[_T_WikidataDumpFile]: return [ dump_type( path=self._dump_dir / path,", "datetime 
files: Mapping[str, _WikidataDumpStatusFile] @validator(\"updated\", pre=True) def _parse_datetime(cls, value: str)", "Final = RangeMap[WikidataDumpPagesMetaHistory]() for dump_file in self._construct_dumps( WikidataDumpPagesMetaHistory, \"metahistory7zdump\" ):", ") -> Sequence[_T_WikidataDumpFile]: return [ dump_type( path=self._dump_dir / path, url=self.mirror", "path = dump_dir / f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\" if not path.exists(): url =", "unit_divisor=1024, ) as progress_bar_size: for dump_file in dump_files: dump_file.download() progress_bar_files.update(1)", "datetime from logging import getLogger from pathlib import Path from", "pages_meta_history: bool = True ) -> None: _LOGGER.info( f\"Downloading Wikidata", "progress_bar_files.update(1) progress_bar_size.update(dump_file.size) _LOGGER.info(f\"Done downloading Wikidata dump {self.version:%4Y%2m%2d}.\") def _construct_dumps( self,", "ignore from typing_extensions import Final from wikidated._utils import RangeMap from", "unit_scale=True, unit_divisor=1024, ) as progress_bar_size: for dump_file in dump_files: dump_file.download()", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "job_name, job in dump_status.jobs.items(): if job.status != \"done\": path.unlink() raise", "return datetime.strptime(value, \"%Y-%m-%d %H:%M:%S\") class _WikidataDumpStatus(PydanticModel): jobs: Mapping[str, _WikidataDumpStatusJob] version:", "the License is distributed on an \"AS IS\" BASIS, #", "version: date, *, mirror: str = \"https://dumps.wikimedia.org\", ) -> None:", "requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True) with path.open(\"w\", encoding=\"UTF-8\") as fd: fd.write(json.dumps(response.json(),", "dump_files), dynamic_ncols=True, position=2, unit=\"B\", unit_scale=True, unit_divisor=1024, ) as progress_bar_size: for", "version: date, mirror: str) -> _WikidataDumpStatus: path = dump_dir /", "*, sites_table: bool = True, pages_meta_history: bool = True )", "self._dump_status = _WikidataDumpStatus.load( self._dump_dir, self.version, self.mirror ) self.sites_table: Final =", "/ path, url=self.mirror + dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size, ) for path,", "__future__ import annotations import json from datetime import date, datetime", "str = \"https://dumps.wikimedia.org\", ) -> None: self._dump_dir = dump_dir self.version:", "'{self.mirror}'.\" ) dump_files: MutableSequence[WikidataDumpFile] = [] if sites_table: dump_files.append(self.sites_table) if", "str @classmethod def load(cls, dump_dir: Path, version: date, mirror: str)", "annotations import json from datetime import date, datetime from logging", "mirror: str) -> _WikidataDumpStatus: path = dump_dir / f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\" if", "from logging import getLogger from pathlib import Path from typing", "Copyright 2021 <NAME> # # Licensed under the Apache License,", "law or agreed to in writing, software # distributed under", "_WikidataDumpStatusJob(PydanticModel): status: str updated: datetime files: Mapping[str, _WikidataDumpStatusFile] @validator(\"updated\", pre=True)", 
"dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size) _LOGGER.info(f\"Done downloading Wikidata dump {self.version:%4Y%2m%2d}.\") def _construct_dumps(", "def __init__( self, dump_dir: Path, version: date, *, mirror: str", "str) -> _WikidataDumpStatus: path = dump_dir / f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\" if not", ") as progress_bar_files, tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} bytes\", total=sum(dump_file.size for", "fd.write(json.dumps(response.json(), indent=2) + \"\\n\") _LOGGER.debug(\"Done downloading Wikidata dump status.\") dump_status", "import WikidataDumpSitesTable _LOGGER = getLogger(__name__) _T_WikidataDumpFile = TypeVar(\"_T_WikidataDumpFile\", bound=WikidataDumpFile) class", "TypeVar(\"_T_WikidataDumpFile\", bound=WikidataDumpFile) class WikidataDump: def __init__( self, dump_dir: Path, version:", "_WikidataDumpStatusFile] @validator(\"updated\", pre=True) def _parse_datetime(cls, value: str) -> datetime: #", "import WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history import ( WikidataDumpPagesMetaHistory, ) from wikidated.wikidata.wikidata_dump_sites_table", "dump_files.append(self.sites_table) if pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} files\",", "may obtain a copy of the License at # #", "dump {self.version:%4Y%2m%2d}.\") def _construct_dumps( self, dump_type: Type[_T_WikidataDumpFile], dump_type_id: str )", "typing_extensions import Final from wikidated._utils import RangeMap from wikidated.wikidata.wikidata_dump_file import", "download( self, *, sites_table: bool = True, pages_meta_history: bool =", "= mirror self._dump_status = _WikidataDumpStatus.load( self._dump_dir, self.version, self.mirror ) self.sites_table:", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "WikidataDumpSitesTable, \"sitestable\" )[0] self.pages_meta_history: Final = 
RangeMap[WikidataDumpPagesMetaHistory]() for dump_file in", "may not use this file except in compliance with the", "pydantic import BaseModel as PydanticModel from pydantic import validator from", "md5: str sha1: str class _WikidataDumpStatusJob(PydanticModel): status: str updated: datetime", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "indent=2) + \"\\n\") _LOGGER.debug(\"Done downloading Wikidata dump status.\") dump_status =", "this file except in compliance with the License. # You", "= f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\" _LOGGER.debug(f\"Downloading Wikidata dump status from '{url}'.\") response =", "from '{url}'.\") response = requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True) with path.open(\"w\",", "# noqa: N805 return datetime.strptime(value, \"%Y-%m-%d %H:%M:%S\") class _WikidataDumpStatus(PydanticModel): jobs:", "MutableSequence, Sequence, Type, TypeVar import requests from pydantic import BaseModel", "dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size, ) for path, dump_status_file in self._dump_status.jobs[ dump_type_id", ") -> None: self._dump_dir = dump_dir self.version: Final = version", "def download( self, *, sites_table: bool = True, pages_meta_history: bool", "dump_dir: Path, version: date, mirror: str) -> _WikidataDumpStatus: path =", "f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\" if not path.exists(): url = f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\" _LOGGER.debug(f\"Downloading Wikidata dump", "<NAME> # # Licensed under the Apache License, Version 2.0", "+ dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size, ) for path, dump_status_file in self._dump_status.jobs[", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "\"https://dumps.wikimedia.org\", ) -> None: self._dump_dir = dump_dir self.version: Final =", "# # Licensed under the Apache 
License, Version 2.0 (the", "= requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True) with path.open(\"w\", encoding=\"UTF-8\") as fd:", "under the License. # from __future__ import annotations import json", "N805 return datetime.strptime(value, \"%Y-%m-%d %H:%M:%S\") class _WikidataDumpStatus(PydanticModel): jobs: Mapping[str, _WikidataDumpStatusJob]", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "-> None: _LOGGER.info( f\"Downloading Wikidata dump {self.version:%4Y%2m%2d} from '{self.mirror}'.\" )", "[] if sites_table: dump_files.append(self.sites_table) if pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with tqdm( desc=f\"Wikidata", "int url: str md5: str sha1: str class _WikidataDumpStatusJob(PydanticModel): status:", ") from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable _LOGGER = getLogger(__name__) _T_WikidataDumpFile =", "status: str updated: datetime files: Mapping[str, _WikidataDumpStatusFile] @validator(\"updated\", pre=True) def", "from wikidated.wikidata.wikidata_dump_pages_meta_history import ( WikidataDumpPagesMetaHistory, ) from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable", "from pathlib import Path from typing import Mapping, MutableSequence, Sequence,", "path, url=self.mirror + dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size, ) for path, dump_status_file", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "import date, datetime from logging import getLogger from pathlib import", "Wikidata dump {self.version:%4Y%2m%2d} from '{self.mirror}'.\" ) dump_files: MutableSequence[WikidataDumpFile] = []", "str md5: str sha1: str class _WikidataDumpStatusJob(PydanticModel): status: str updated:", "dump {self.version:%4Y%2m%2d} bytes\", total=sum(dump_file.size for dump_file in dump_files), dynamic_ncols=True, position=2,", 
"License. # from __future__ import annotations import json from datetime", "-> Sequence[_T_WikidataDumpFile]: return [ dump_type( path=self._dump_dir / path, url=self.mirror +", "Sequence, Type, TypeVar import requests from pydantic import BaseModel as", "getLogger(__name__) _T_WikidataDumpFile = TypeVar(\"_T_WikidataDumpFile\", bound=WikidataDumpFile) class WikidataDump: def __init__( self,", "dump_type_id: str ) -> Sequence[_T_WikidataDumpFile]: return [ dump_type( path=self._dump_dir /", "path, dump_status_file in self._dump_status.jobs[ dump_type_id ].files.items() ] class _WikidataDumpStatusFile(PydanticModel): size:", "downloading Wikidata dump status.\") dump_status = _WikidataDumpStatus.parse_file(path) for job_name, job", "def _parse_datetime(cls, value: str) -> datetime: # noqa: N805 return", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "return [ dump_type( path=self._dump_dir / path, url=self.mirror + dump_status_file.url, sha1=dump_status_file.sha1,", "datetime.strptime(value, \"%Y-%m-%d %H:%M:%S\") class _WikidataDumpStatus(PydanticModel): jobs: Mapping[str, _WikidataDumpStatusJob] version: str", "wikidated.wikidata.wikidata_dump_pages_meta_history import ( WikidataDumpPagesMetaHistory, ) from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable _LOGGER", "{self.version:%4Y%2m%2d} from '{self.mirror}'.\" ) dump_files: MutableSequence[WikidataDumpFile] = [] if sites_table:", "desc=f\"Wikidata dump {self.version:%4Y%2m%2d} files\", total=len(dump_files), dynamic_ncols=True, position=1, ) as progress_bar_files,", "or implied. 
# See the License for the specific language", "from pydantic import BaseModel as PydanticModel from pydantic import validator", "from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history import ( WikidataDumpPagesMetaHistory, )", "( WikidataDumpPagesMetaHistory, ) from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable _LOGGER = getLogger(__name__)", "with path.open(\"w\", encoding=\"UTF-8\") as fd: fd.write(json.dumps(response.json(), indent=2) + \"\\n\") _LOGGER.debug(\"Done", "_LOGGER.info(f\"Done downloading Wikidata dump {self.version:%4Y%2m%2d}.\") def _construct_dumps( self, dump_type: Type[_T_WikidataDumpFile],", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "status.\") dump_status = _WikidataDumpStatus.parse_file(path) for job_name, job in dump_status.jobs.items(): if", "unit=\"B\", unit_scale=True, unit_divisor=1024, ) as progress_bar_size: for dump_file in dump_files:", "if pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} files\", total=len(dump_files),", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "dynamic_ncols=True, position=2, unit=\"B\", unit_scale=True, unit_divisor=1024, ) as progress_bar_size: for dump_file", "MutableSequence[WikidataDumpFile] = [] if sites_table: dump_files.append(self.sites_table) if pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with", "{self.version:%4Y%2m%2d}.\") def _construct_dumps( self, dump_type: Type[_T_WikidataDumpFile], dump_type_id: str ) ->", "parents=True) with path.open(\"w\", encoding=\"UTF-8\") as fd: fd.write(json.dumps(response.json(), indent=2) + \"\\n\")", "pre=True) def _parse_datetime(cls, value: str) -> datetime: # noqa: N805", "_parse_datetime(cls, value: str) -> datetime: # noqa: 
N805 return datetime.strptime(value,", "downloading Wikidata dump {self.version:%4Y%2m%2d}.\") def _construct_dumps( self, dump_type: Type[_T_WikidataDumpFile], dump_type_id:", "dump_status_file in self._dump_status.jobs[ dump_type_id ].files.items() ] class _WikidataDumpStatusFile(PydanticModel): size: int", "import BaseModel as PydanticModel from pydantic import validator from tqdm", "True, pages_meta_history: bool = True ) -> None: _LOGGER.info( f\"Downloading", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "import getLogger from pathlib import Path from typing import Mapping,", "class WikidataDump: def __init__( self, dump_dir: Path, version: date, *,", "Type, TypeVar import requests from pydantic import BaseModel as PydanticModel", "value: str) -> datetime: # noqa: N805 return datetime.strptime(value, \"%Y-%m-%d", "PydanticModel from pydantic import validator from tqdm import tqdm #", "!= \"done\": path.unlink() raise Exception(f\"Job '{job_name}' is not 'done', but", "class _WikidataDumpStatusFile(PydanticModel): size: int url: str md5: str sha1: str", "# # Unless required by applicable law or agreed to", "dump_file in self._construct_dumps( WikidataDumpPagesMetaHistory, \"metahistory7zdump\" ): self.pages_meta_history[dump_file.page_ids] = dump_file def", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "total=sum(dump_file.size for dump_file in dump_files), dynamic_ncols=True, position=2, unit=\"B\", unit_scale=True, unit_divisor=1024,", "].files.items() ] class _WikidataDumpStatusFile(PydanticModel): size: int url: str md5: str", "Version 2.0 (the \"License\"); # you may not use this", "str updated: datetime files: Mapping[str, _WikidataDumpStatusFile] @validator(\"updated\", pre=True) def _parse_datetime(cls,", "for job_name, job in dump_status.jobs.items(): if job.status != \"done\": path.unlink()", "if sites_table: dump_files.append(self.sites_table) if 
pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with tqdm( desc=f\"Wikidata dump", "import RangeMap from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history import (", "import requests from pydantic import BaseModel as PydanticModel from pydantic", "tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} files\", total=len(dump_files), dynamic_ncols=True, position=1, ) as", "-> _WikidataDumpStatus: path = dump_dir / f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\" if not path.exists():", "implied. # See the License for the specific language governing", "# from __future__ import annotations import json from datetime import", "TypeVar import requests from pydantic import BaseModel as PydanticModel from", "under the Apache License, Version 2.0 (the \"License\"); # you", "dump_status = _WikidataDumpStatus.parse_file(path) for job_name, job in dump_status.jobs.items(): if job.status", "+ \"\\n\") _LOGGER.debug(\"Done downloading Wikidata dump status.\") dump_status = _WikidataDumpStatus.parse_file(path)", "position=1, ) as progress_bar_files, tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} bytes\", total=sum(dump_file.size", "Wikidata dump status.\") dump_status = _WikidataDumpStatus.parse_file(path) for job_name, job in", "WikidataDump: def __init__( self, dump_dir: Path, version: date, *, mirror:", "by applicable law or agreed to in writing, software #", "WikidataDumpPagesMetaHistory, \"metahistory7zdump\" ): self.pages_meta_history[dump_file.page_ids] = dump_file def download( self, *,", "dump_dir / f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\" if not path.exists(): url = f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\" _LOGGER.debug(f\"Downloading", "f\"Downloading Wikidata dump {self.version:%4Y%2m%2d} from '{self.mirror}'.\" ) dump_files: MutableSequence[WikidataDumpFile] =", "f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\" 
_LOGGER.debug(f\"Downloading Wikidata dump status from '{url}'.\") response = requests.get(url)", "from typing_extensions import Final from wikidated._utils import RangeMap from wikidated.wikidata.wikidata_dump_file", "self, dump_type: Type[_T_WikidataDumpFile], dump_type_id: str ) -> Sequence[_T_WikidataDumpFile]: return [", "datetime import date, datetime from logging import getLogger from pathlib", "in dump_files), dynamic_ncols=True, position=2, unit=\"B\", unit_scale=True, unit_divisor=1024, ) as progress_bar_size:", "= True, pages_meta_history: bool = True ) -> None: _LOGGER.info(", "from tqdm import tqdm # type: ignore from typing_extensions import", "self._construct_dumps( WikidataDumpSitesTable, \"sitestable\" )[0] self.pages_meta_history: Final = RangeMap[WikidataDumpPagesMetaHistory]() for dump_file", "dump {self.version:%4Y%2m%2d} from '{self.mirror}'.\" ) dump_files: MutableSequence[WikidataDumpFile] = [] if", "'{url}'.\") response = requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True) with path.open(\"w\", encoding=\"UTF-8\")", "in dump_files: dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size) _LOGGER.info(f\"Done downloading Wikidata dump {self.version:%4Y%2m%2d}.\")", "from wikidated._utils import RangeMap from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history", "WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history import ( WikidataDumpPagesMetaHistory, ) from wikidated.wikidata.wikidata_dump_sites_table import", "self, *, sites_table: bool = True, pages_meta_history: bool = True", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "for dump_file in dump_files: dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size) _LOGGER.info(f\"Done downloading Wikidata", "/ f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\" if not path.exists(): 
url = f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\" _LOGGER.debug(f\"Downloading Wikidata", "Unless required by applicable law or agreed to in writing,", "sha1=dump_status_file.sha1, size=dump_status_file.size, ) for path, dump_status_file in self._dump_status.jobs[ dump_type_id ].files.items()", "size=dump_status_file.size, ) for path, dump_status_file in self._dump_status.jobs[ dump_type_id ].files.items() ]", "_WikidataDumpStatus.load( self._dump_dir, self.version, self.mirror ) self.sites_table: Final = self._construct_dumps( WikidataDumpSitesTable,", "= self._construct_dumps( WikidataDumpSitesTable, \"sitestable\" )[0] self.pages_meta_history: Final = RangeMap[WikidataDumpPagesMetaHistory]() for", "dump {self.version:%4Y%2m%2d} files\", total=len(dump_files), dynamic_ncols=True, position=1, ) as progress_bar_files, tqdm(", "def load(cls, dump_dir: Path, version: date, mirror: str) -> _WikidataDumpStatus:", "the specific language governing permissions and # limitations under the", "_LOGGER.info( f\"Downloading Wikidata dump {self.version:%4Y%2m%2d} from '{self.mirror}'.\" ) dump_files: MutableSequence[WikidataDumpFile]", "self._dump_dir, self.version, self.mirror ) self.sites_table: Final = self._construct_dumps( WikidataDumpSitesTable, \"sitestable\"", "from datetime import date, datetime from logging import getLogger from", "applicable law or agreed to in writing, software # distributed", "self.version, self.mirror ) self.sites_table: Final = self._construct_dumps( WikidataDumpSitesTable, \"sitestable\" )[0]", "progress_bar_files, tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} bytes\", total=sum(dump_file.size for dump_file in", "import Path from typing import Mapping, MutableSequence, Sequence, Type, TypeVar", "dump_status.jobs.items(): if job.status != \"done\": path.unlink() raise Exception(f\"Job '{job_name}' is", "dump_files: dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size) 
_LOGGER.info(f\"Done downloading Wikidata dump {self.version:%4Y%2m%2d}.\") def", "dump_dir self.version: Final = version self.mirror: Final = mirror self._dump_status", "in writing, software # distributed under the License is distributed", "datetime: # noqa: N805 return datetime.strptime(value, \"%Y-%m-%d %H:%M:%S\") class _WikidataDumpStatus(PydanticModel):", "self.mirror ) self.sites_table: Final = self._construct_dumps( WikidataDumpSitesTable, \"sitestable\" )[0] self.pages_meta_history:", "[ dump_type( path=self._dump_dir / path, url=self.mirror + dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size,", "wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable _LOGGER = getLogger(__name__) _T_WikidataDumpFile = TypeVar(\"_T_WikidataDumpFile\", bound=WikidataDumpFile)", "dump_file in dump_files: dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size) _LOGGER.info(f\"Done downloading Wikidata dump", "dump_dir: Path, version: date, *, mirror: str = \"https://dumps.wikimedia.org\", )", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "in self._dump_status.jobs[ dump_type_id ].files.items() ] class _WikidataDumpStatusFile(PydanticModel): size: int url:", "License, Version 2.0 (the \"License\"); # you may not use", "as fd: fd.write(json.dumps(response.json(), indent=2) + \"\\n\") _LOGGER.debug(\"Done downloading Wikidata dump", "# You may obtain a copy of the License at", "): self.pages_meta_history[dump_file.page_ids] = dump_file def download( self, *, sites_table: bool", "if not path.exists(): url = f\"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json\" _LOGGER.debug(f\"Downloading Wikidata dump status", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "with tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} files\", total=len(dump_files), dynamic_ncols=True, position=1, )", "@classmethod def load(cls, dump_dir: Path, version: date, mirror: 
str) ->", "Final = mirror self._dump_status = _WikidataDumpStatus.load( self._dump_dir, self.version, self.mirror )", "= version self.mirror: Final = mirror self._dump_status = _WikidataDumpStatus.load( self._dump_dir,", "_LOGGER.debug(f\"Downloading Wikidata dump status from '{url}'.\") response = requests.get(url) response.raise_for_status()", "Mapping[str, _WikidataDumpStatusFile] @validator(\"updated\", pre=True) def _parse_datetime(cls, value: str) -> datetime:", "_LOGGER.debug(\"Done downloading Wikidata dump status.\") dump_status = _WikidataDumpStatus.parse_file(path) for job_name,", "self.pages_meta_history: Final = RangeMap[WikidataDumpPagesMetaHistory]() for dump_file in self._construct_dumps( WikidataDumpPagesMetaHistory, \"metahistory7zdump\"", "Wikidata dump status from '{url}'.\") response = requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True,", "import ( WikidataDumpPagesMetaHistory, ) from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable _LOGGER =", "*, mirror: str = \"https://dumps.wikimedia.org\", ) -> None: self._dump_dir =", "the License for the specific language governing permissions and #", "dump_type_id ].files.items() ] class _WikidataDumpStatusFile(PydanticModel): size: int url: str md5:", "Apache License, Version 2.0 (the \"License\"); # you may not", "self.version: Final = version self.mirror: Final = mirror self._dump_status =", "either express or implied. 
# See the License for the", "for dump_file in dump_files), dynamic_ncols=True, position=2, unit=\"B\", unit_scale=True, unit_divisor=1024, )", "job.status != \"done\": path.unlink() raise Exception(f\"Job '{job_name}' is not 'done',", "if job.status != \"done\": path.unlink() raise Exception(f\"Job '{job_name}' is not", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "\"%Y-%m-%d %H:%M:%S\") class _WikidataDumpStatus(PydanticModel): jobs: Mapping[str, _WikidataDumpStatusJob] version: str @classmethod", "bool = True ) -> None: _LOGGER.info( f\"Downloading Wikidata dump", "Mapping, MutableSequence, Sequence, Type, TypeVar import requests from pydantic import", "governing permissions and # limitations under the License. # from", "self.sites_table: Final = self._construct_dumps( WikidataDumpSitesTable, \"sitestable\" )[0] self.pages_meta_history: Final =", "bool = True, pages_meta_history: bool = True ) -> None:", "bound=WikidataDumpFile) class WikidataDump: def __init__( self, dump_dir: Path, version: date,", "= dump_dir self.version: Final = version self.mirror: Final = mirror", "\"metahistory7zdump\" ): self.pages_meta_history[dump_file.page_ids] = dump_file def download( self, *, sites_table:", "desc=f\"Wikidata dump {self.version:%4Y%2m%2d} bytes\", total=sum(dump_file.size for dump_file in dump_files), dynamic_ncols=True,", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "str) -> datetime: # noqa: N805 return datetime.strptime(value, \"%Y-%m-%d %H:%M:%S\")", "progress_bar_size.update(dump_file.size) _LOGGER.info(f\"Done downloading Wikidata dump {self.version:%4Y%2m%2d}.\") def _construct_dumps( self, dump_type:", ") dump_files: MutableSequence[WikidataDumpFile] = [] if sites_table: dump_files.append(self.sites_table) if pages_meta_history:", "str sha1: str class _WikidataDumpStatusJob(PydanticModel): status: str updated: datetime files:", "self._construct_dumps( WikidataDumpPagesMetaHistory, 
\"metahistory7zdump\" ): self.pages_meta_history[dump_file.page_ids] = dump_file def download( self,", "= [] if sites_table: dump_files.append(self.sites_table) if pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with tqdm(", "_WikidataDumpStatus.parse_file(path) for job_name, job in dump_status.jobs.items(): if job.status != \"done\":", "mirror: str = \"https://dumps.wikimedia.org\", ) -> None: self._dump_dir = dump_dir", "requests from pydantic import BaseModel as PydanticModel from pydantic import", "\"License\"); # you may not use this file except in", "self._dump_status.jobs[ dump_type_id ].files.items() ] class _WikidataDumpStatusFile(PydanticModel): size: int url: str", "_LOGGER = getLogger(__name__) _T_WikidataDumpFile = TypeVar(\"_T_WikidataDumpFile\", bound=WikidataDumpFile) class WikidataDump: def", "jobs: Mapping[str, _WikidataDumpStatusJob] version: str @classmethod def load(cls, dump_dir: Path,", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "pydantic import validator from tqdm import tqdm # type: ignore", "files: Mapping[str, _WikidataDumpStatusFile] @validator(\"updated\", pre=True) def _parse_datetime(cls, value: str) ->", "dump_type: Type[_T_WikidataDumpFile], dump_type_id: str ) -> Sequence[_T_WikidataDumpFile]: return [ dump_type(", "# distributed under the License is distributed on an \"AS", "pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with tqdm( desc=f\"Wikidata dump {self.version:%4Y%2m%2d} files\", total=len(dump_files), dynamic_ncols=True,", "# Unless required by applicable law or agreed to in", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "\"\\n\") _LOGGER.debug(\"Done downloading Wikidata dump status.\") dump_status = _WikidataDumpStatus.parse_file(path) for", "# Copyright 2021 <NAME> # # Licensed under the Apache", ")[0] self.pages_meta_history: Final = RangeMap[WikidataDumpPagesMetaHistory]() for dump_file in self._construct_dumps( 
WikidataDumpPagesMetaHistory,", "progress_bar_size: for dump_file in dump_files: dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size) _LOGGER.info(f\"Done downloading", "path.open(\"w\", encoding=\"UTF-8\") as fd: fd.write(json.dumps(response.json(), indent=2) + \"\\n\") _LOGGER.debug(\"Done downloading", "You may obtain a copy of the License at #", "import json from datetime import date, datetime from logging import", ") as progress_bar_size: for dump_file in dump_files: dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size)", "response = requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True) with path.open(\"w\", encoding=\"UTF-8\") as", "date, mirror: str) -> _WikidataDumpStatus: path = dump_dir / f\"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json\"", "class _WikidataDumpStatus(PydanticModel): jobs: Mapping[str, _WikidataDumpStatusJob] version: str @classmethod def load(cls,", "class _WikidataDumpStatusJob(PydanticModel): status: str updated: datetime files: Mapping[str, _WikidataDumpStatusFile] @validator(\"updated\",", "the Apache License, Version 2.0 (the \"License\"); # you may", "import tqdm # type: ignore from typing_extensions import Final from" ]
[ "notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight':", "Table'), html.Div(id='detailed-execution-table') ], style={'width': '1000px', 'display': 'inline-block', 'marginBottom': 5, 'marginTop':", "print_function __author__ = 'saeedamen' # <NAME> / <EMAIL> # #", "from collections import OrderedDict from pandas.tseries.offsets import * from tcapy.vis.layoutdash", "multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues,", "= html.Div([ self._sc.header_bar('FX: Aggregated - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status:", "__author__ = 'saeedamen' # <NAME> / <EMAIL> # # Copyright", "Date', id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker',", "[x.date() for x in available_dates[::-1]] # For detailed page only", "self.available_times = [t.strftime(\"%H:%M\") for t in times] self.available_tickers = self._constants.available_tickers_dictionary['All']", "self.available_times)]), prefix_id='detailed'), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers),", "collections import OrderedDict from pandas.tseries.offsets import * from tcapy.vis.layoutdash import", "style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) 
################################################################################################################ self.pages['compliance'] = html.Div([", "download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download broker", "# Secondary page for analysing aggregated statistics over long periods", "# 'timeline_trade_orders' : {'client-orders': 'order', 'executions': 'trade'}, # 'markout_trade_orders' :", "drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance',", "self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Executions: Histogram vs PDF fit',", "5, 'marginRight': 5}), ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'})", "prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: PDF fit (' + self._constants.reporting_currency", "'order_df', 'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df', 'order': 'sparse_market_order_df'}, 'detailed_markout_trade_order': {'execution':", "pages - detailed_page - for doing detailed tcapy analysis for", "'bid/ask'] # For aggregated page only self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues", "of (eg. 
over the course of a few days) self.pages['detailed']", "self.page_content = html.Div([ dcc.Location(id='url', refresh=False), html.Div(id='page-content') ]) link_bar_dict = {'Detailed'", "only self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary)", "self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade outliers CSV', downloadplot_tag='execution-by-anomalous-download-link',", "id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload',", "['Date', 'ticker', 'side', 'notional cur', 'benchmark', 'exec not', 'exec not", "= { # Detailed trader page # 'timeline_trade_orders' : {'client-orders':", "prefix_id='detailed'), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo',", "drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'),", "'marginLeft': 'auto'}) ################################################################################################################ self.pages['compliance'] = html.Div([ self._sc.header_bar('FX: Compliance 
Analysis', img='logo.png'),", "Date', id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker',", ": 'compliance'} trade_outliers_cols = ['Date', 'ticker', 'side', 'notional cur', 'benchmark',", "prefix_id='detailed', height=500), # Detailed tcapy markout table for executions html.Div([", "id={'start-date-val' : self.available_dates, 'start-time-val' : self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val',", "Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade outliers CSV', downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'),", "id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val', self.available_times)]), prefix_id='detailed'), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker',", "self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True),", "Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish", "construct_layout(self): self.page_content = html.Div([ dcc.Location(id='url', refresh=False), html.Div(id='page-content') ]) link_bar_dict =", "id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download broker CSV', 
downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ), self._sc.horizontal_bar()", "self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'), # self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'),", "## Date/time components import pandas as pd import datetime from", "self.pages['compliance'] = html.Div([ self._sc.header_bar('FX: Compliance Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\",", "margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val',", "download_file='download_execution_candle_timeline.csv', height=500), self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Executions: Histogram vs", "('finish-time-val', self.available_times)]), prefix_id='detailed'), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed',", "id='aggregated-date-val', offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader: Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar(),", "LayoutDash ######################################################################################################################## class LayoutDashImplGen(LayoutDash): \"\"\"This implements the LayoutDash abstract class,", "'side', 'notional cur', 'benchmark', 'exec not', 'exec not in rep", "'slippage'] broker_cols = ['Date', 'by broker notional (rep cur)'] #", "GUI for the tcapy application. 
It creates two web pages", "rep cur', 'slippage'] broker_cols = ['Date', 'by broker notional (rep", "os ## Date/time components import pandas as pd import datetime", "['yes', 'no'] self.construct_layout() def _flatten_dictionary(self, dictionary): available = dictionary['All'] available_groups", "prefix_id='compliance'), # self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'), # self.date_picker(caption='Finish Date', id='finish-date-dtpicker',", "self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'), # self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'), ]),", "self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'), # self.button(caption = 'Print PDF', id =", "prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance',", "tcapy analysis for a specific currency pair - aggregated_page -", "# self.button(caption = 'Print PDF', id = 'detailed-print-pdf-button', className =", "id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance', drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(), html.Div([", "License. 
# ## Web server components import dash_core_components as dcc", "margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id={'start-date-val'", "' notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar() ], style={'width': '1000px',", "available_groups = self._util_func.dict_key_list(dictionary.keys()) return self.flatten_list_of_strings([available_groups, available]) def construct_layout(self): self.page_content =", "Ltd. - http//www.cuemacro.com / @cuemacro # # See the License", "link_bar_dict = {'Detailed' : 'detailed', 'Aggregated' : 'aggregated', 'Compliance' :", "self.available_execution_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'buy trade', 'sell trade']", "id='broker-val', prefix_id='compliance', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue',", "]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'), # , msg_id='aggregated-status'), self._sc.horizontal_bar(), #", "boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val',", "Date', id='finish-date-dtpicker', prefix_id='compliance'), ]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance',", "element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', 
height=500), self._sc.plot(caption='Executions: Markout', id='execution-markout-plot',", "'Compliance' : 'compliance'} trade_outliers_cols = ['Date', 'ticker', 'side', 'notional cur',", "'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft':", "self._sc.width_row_cell(html.B(\"Status: ok\", id='detailed-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([", "= dictionary['All'] available_groups = self._util_func.dict_key_list(dictionary.keys()) return self.flatten_list_of_strings([available_groups, available]) def construct_layout(self):", "self.id_flags = { # Detailed trader page # 'timeline_trade_orders' :", "Time of Day', id='start-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time of Day',", "######################################################################################################################## class LayoutDashImplGen(LayoutDash): \"\"\"This implements the LayoutDash abstract class, to", "height=500), self._sc.plot(caption='Orders: Histogram vs PDF fit', id='order-dist-plot', prefix_id='detailed', height=500), #", "'auto', 'marginLeft': 'auto'}) # ID flags self.id_flags = { #", "Trader: Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: PDF fit", "self.available_order_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed',", "multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', 
prefix_id='aggregated', drop_down_values=self.available_market_data),", "drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance',", "broker_cols = ['Date', 'by broker notional (rep cur)'] # Main", "prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val',", "Timeline', id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline', height=500),", "= 'Print PDF', id = 'detailed-print-pdf-button', className = 'no-print'), #", "in available_dates[::-1]] # For detailed page only self.available_times = [t.strftime(\"%H:%M\")", "multiple time periods \"\"\" def __init__(self, app=None, constants=None, url_prefix=''): super(LayoutDashImplGen,", "a few days) self.pages['detailed'] = html.Div([ self._sc.header_bar('FX: Detailed - Trader", "Day', id='finish-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance',", "'markout_trade_orders' : {'client-orders': 'order_df', 'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df', 'order':", "broker CSV', downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight':", "selection 
boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date',", "page # 'timeline_trade_orders' : {'client-orders': 'order', 'executions': 'trade'}, # 'markout_trade_orders'", "statistics over long periods of time, eg. who is the", "dictionary['All'] available_groups = self._util_func.dict_key_list(dictionary.keys()) return self.flatten_list_of_strings([available_groups, available]) def construct_layout(self): self.page_content", "self.available_brokers = self._constants.available_brokers_dictionary['All'] self.available_algos = self._constants.available_algos_dictionary['All'] self.available_market_data = self._constants.available_market_data self.available_order_plot_lines", "def construct_layout(self): self.page_content = html.Div([ dcc.Location(id='url', refresh=False), html.Div(id='page-content') ]) link_bar_dict", "For aggregated page only self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary)", "vs PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500), # Detailed tcapy markout", "drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance', drop_down_values=self.available_grouped_brokers,", "multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload),", "creates two web pages - detailed_page - for doing detailed", "Totals by Broker', 
id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download broker CSV', downloadplot_tag='summary-by-broker-download-link',", "], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ # Secondary", "self).__init__(app=app, constants=constants, url_prefix=url_prefix) available_dates = pd.date_range( datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(),", "executions html.Div([ html.H3('Executions: Markout Table'), html.Div(id='detailed-execution-table') ], style={'width': '1000px', 'display':", "currency pairs and over multiple time periods \"\"\" def __init__(self,", "self._constants.available_venues_dictionary['All'] self.available_brokers = self._constants.available_brokers_dictionary['All'] self.available_algos = self._constants.available_algos_dictionary['All'] self.available_market_data = self._constants.available_market_data", "prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val',", "self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates),", "self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance',", "= html.Div([ self._sc.header_bar('FX: Detailed - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), 
self._sc.width_row_cell(html.B(\"Status:", "- for more aggregated style analysis across multiple currency pairs", "pages) self.available_dates = [x.date() for x in available_dates[::-1]] # For", "'order': 'order_df'}, 'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'},", "of a few days) self.pages['detailed'] = html.Div([ self._sc.header_bar('FX: Detailed -", "drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time of", "/ <EMAIL> # # Copyright 2017 Cuemacro Ltd. - http//www.cuemacro.com", "governing permissions and limitations under the License. # ## Web", "id='start-date-dtpicker', prefix_id='compliance'), # self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'), ]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance:", "# Main page for detailed analysing of (eg. 
over the", "'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline': 'sparse_market_order_df'}, # Aggregated trader page", "self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val', self.available_times)]), prefix_id='detailed'), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed',", "as dcc import dash_html_components as html import base64 import os", "drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'), # self.button(caption = 'Print", "self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance', drop_down_values=self.available_grouped_brokers, multiselect=True),", "html.Div([ self._sc.header_bar('FX: Detailed - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\",", "cur)'] # Main page for detailed analysing of (eg. 
over", "id='execution-dist-plot', prefix_id='detailed', height=500), # Detailed tcapy markout table for executions", "Detailed tcapy markout table for executions html.Div([ html.H3('Executions: Markout Table'),", "'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df', 'order': 'sparse_market_order_df'}, 'detailed_markout_trade_order': {'execution': 'trade_df',", "id='algo-val', prefix_id='detailed', drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data', id='market-data-val',", "'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline': 'sparse_market_order_df'}, # Aggregated trader", "= self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types = self._constants.available_event_types self.available_metrics =", "'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker':", "CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Orders:", ": {'client-orders': 'order', 'executions': 'trade'}, # 'markout_trade_orders' : {'client-orders': 'order_df',", "'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, # Compliance 
page 'compliance_metric_table_trade_order': {'execution-by-anomalous':", "id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start", "'ask', 'arrival', 'twap', 'vwap', 'buy trade', 'sell trade'] self.available_execution_plot_lines =", "'marginBottom': 5, 'marginTop': 5, 'marginLeft': 5, 'marginRight': 5}), ], style={'width':", "'auto'}) ################################################################################################################ # Secondary page for analysing aggregated statistics over", "), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) #", "datetime import timedelta from collections import OrderedDict from pandas.tseries.offsets import", "specific language governing permissions and limitations under the License. 
#", "__init__(self, app=None, constants=None, url_prefix=''): super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix) available_dates =", "################################################################################################################ self.pages['compliance'] = html.Div([ self._sc.header_bar('FX: Compliance Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status:", "'marginRight': 5}), ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################", "{'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df',", "boxes on both pages # Reverse date list (for both", "down boxes on both pages # Reverse date list (for", "CSV', downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols,", "times = pd.date_range(\"0:00\", \"23:59\", freq=\"15min\") ### create the possible values", "'1.25', '1.5', '2.0', 'bid/ask'] # For aggregated page only self.available_grouped_tickers", "prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders: Markout',", "detailed tcapy analysis for a specific currency pair - aggregated_page", "self._util_func.dict_key_list(dictionary.keys()) return self.flatten_list_of_strings([available_groups, 
available]) def construct_layout(self): self.page_content = html.Div([ dcc.Location(id='url',", "html.H3('Executions: Markout Table'), html.Div(id='detailed-execution-table') ], style={'width': '1000px', 'display': 'inline-block', 'marginBottom':", "eg. who is the best broker? self.pages['aggregated'] = html.Div([ self._sc.header_bar('FX:", "self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance',", "drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed', drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue',", "url_prefix=''): super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix) available_dates = pd.date_range( datetime.datetime.today().date() -", "dash_html_components as html import base64 import os ## Date/time components", "dcc.Location(id='url', refresh=False), html.Div(id='page-content') ]) link_bar_dict = {'Detailed' : 'detailed', 'Aggregated'", "self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'), #", "Date', id='start-date-dtpicker', prefix_id='compliance'), # self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'), ]), self._sc.horizontal_bar(),", "downloadplot_caption='Trade outliers CSV', downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table',", "self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed', height=500), 
self._sc.plot(caption='Orders: Histogram vs PDF fit',", "refresh=False), html.Div(id='page-content') ]) link_bar_dict = {'Detailed' : 'detailed', 'Aggregated' :", "'ask', 'buy trade', 'sell trade'] self.available_slippage_bounds = ['0.25', '0.5', '1.0',", "Cuemacro Ltd. - http//www.cuemacro.com / @cuemacro # # See the", "self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'), # self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'), # self.date_picker(caption='Finish", "Timeline', id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', height=500),", "See the License for the specific language governing permissions and", "boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val',", "= pd.date_range(\"0:00\", \"23:59\", freq=\"15min\") ### create the possible values for", "[t.strftime(\"%H:%M\") for t in times] self.available_tickers = self._constants.available_tickers_dictionary['All'] self.available_venues =", "from pandas.tseries.offsets import * from tcapy.vis.layoutdash import LayoutDash ######################################################################################################################## class", "html.Div([ dcc.Location(id='url', refresh=False), html.Div(id='page-content') ]) link_bar_dict = {'Detailed' : 'detailed',", "from __future__ import division, print_function __author__ = 'saeedamen' # <NAME>", "'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 
'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker',", "self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'), #", "trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download CSV',", "possible values for drop down boxes on both pages #", "['candlestick', 'mid', 'bid', 'ask', 'arrival', 'twap', 'vwap', 'buy trade', 'sell", "return self.flatten_list_of_strings([available_groups, available]) def construct_layout(self): self.page_content = html.Div([ dcc.Location(id='url', refresh=False),", "cur', 'slippage'] broker_cols = ['Date', 'by broker notional (rep cur)']", "# See the License for the specific language governing permissions", "not in rep cur', 'slippage'] broker_cols = ['Date', 'by broker", "# Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download", "prefix_id='compliance'), ]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade", "download_file='download_broker.csv' ), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'})", "className = 'no-print'), # Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', 
prefix_id='detailed',", "long periods of time, eg. who is the best broker?", "Markout', id='execution-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Executions: Histogram vs PDF fit', id='execution-dist-plot',", "prefix_id='aggregated', drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button',", "{'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline': 'sparse_market_order_df'}, #", "import dash_core_components as dcc import dash_html_components as html import base64", "(bp)', id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance', drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(),", "img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(), # dropdown selection", "drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val',", "id='order-dist-plot', prefix_id='detailed', height=500), # Execution trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot',", "'exec not in rep cur', 'slippage'] broker_cols = ['Date', 'by", "both pages # Reverse date list (for both detailed and", "self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated', 
drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics), ]),", "ok\", id='detailed-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start", "prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade outliers CSV', downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals by", ": self.available_dates, 'start-time-val' : self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates),", "columns=broker_cols, downloadplot_caption='Download broker CSV', downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ), self._sc.horizontal_bar() ], style={'width':", "drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated',", "'auto'}) # ID flags self.id_flags = { # Detailed trader", "Data', id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(),", "timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(), freq=BDay()) times = pd.date_range(\"0:00\", \"23:59\", freq=\"15min\") ### create", "vs PDF fit', id='order-dist-plot', prefix_id='detailed', height=500), # Execution trades self._sc.horizontal_bar(),", "'by broker notional (rep cur)'] # Main page for detailed", "height=500), self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', 
prefix_id='detailed', height=500), self._sc.plot(caption='Executions: Histogram vs PDF", "ok\", id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(), # dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start", "page for detailed analysing of (eg. over the course of", "prefix_id='detailed'), # self.button(caption = 'Print PDF', id = 'detailed-print-pdf-button', className", "id='event-type-val', prefix_id='aggregated', drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate',", "img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='compliance-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection", "for the tcapy application. It creates two web pages -", "'buy trade', 'sell trade'] self.available_execution_plot_lines = ['candlestick', 'mid', 'bid', 'ask',", "self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True),", "'notional cur', 'benchmark', 'exec not', 'exec not in rep cur',", "from tcapy.vis.layoutdash import LayoutDash ######################################################################################################################## class LayoutDashImplGen(LayoutDash): \"\"\"This implements the", "import * from tcapy.vis.layoutdash import LayoutDash ######################################################################################################################## class LayoutDashImplGen(LayoutDash): \"\"\"This", "= self._constants.available_brokers_dictionary['All'] self.available_algos = self._constants.available_algos_dictionary['All'] 
self.available_market_data = self._constants.available_market_data self.available_order_plot_lines =", "['0.25', '0.5', '1.0', '1.25', '1.5', '2.0', 'bid/ask'] # For aggregated", "prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed', drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues),", "pair - aggregated_page - for more aggregated style analysis across", "Histogram vs PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500), # Detailed tcapy", "self.available_order_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'arrival', 'twap', 'vwap', 'buy", "'sparse_market_order_df'}, 'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'}, 'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order':", "id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val',", "self.pages['detailed'] = html.Div([ self._sc.header_bar('FX: Detailed - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict),", "self._sc.plot(caption='Orders: Histogram vs PDF fit', id='order-dist-plot', prefix_id='detailed', height=500), # Execution", "{'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline': 'sparse_market_order_df'}, # Aggregated trader page 'aggregated_bar_trade_order': {'execution-by-ticker':", "self._sc.width_row_cell(html.B(\"Status: ok\", id='compliance-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([", "- timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(), freq=BDay()) times = 
pd.date_range(\"0:00\", \"23:59\", freq=\"15min\") ###", "dictionary): available = dictionary['All'] available_groups = self._util_func.dict_key_list(dictionary.keys()) return self.flatten_list_of_strings([available_groups, available])", "components import dash_core_components as dcc import dash_html_components as html import", "Data', id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload),", "'0.5', '1.0', '1.25', '1.5', '2.0', 'bid/ask'] # For aggregated page", "Data', id='market-data-val', prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated', drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric',", "'marginLeft': 'auto'}) # ID flags self.id_flags = { # Detailed", "'arrival', 'twap', 'vwap', 'buy trade', 'sell trade'] self.available_execution_plot_lines = ['candlestick',", "{'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, #", "page only self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers =", "currency pair - aggregated_page - for more aggregated style analysis", "datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(), freq=BDay()) times = pd.date_range(\"0:00\", \"23:59\", freq=\"15min\")", "'dist_trade_df_by/pdf/venue'}, # Compliance page 'compliance_metric_table_trade_order': {'execution-by-anomalous': 
'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, 'compliance_download_link_trade_order':", "# 'markout_trade_orders' : {'client-orders': 'order_df', 'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df',", "Markout', id='order-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Orders: Histogram vs PDF fit', id='order-dist-plot',", "pandas.tseries.offsets import * from tcapy.vis.layoutdash import LayoutDash ######################################################################################################################## class LayoutDashImplGen(LayoutDash):", "# self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'), ]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade Outliers',", "height=500), self._sc.plot(caption='Executions: Histogram vs PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500), #", ": {'client-orders': 'order_df', 'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df', 'order': 'sparse_market_order_df'},", "'detailed', 'Aggregated' : 'aggregated', 'Compliance' : 'compliance'} trade_outliers_cols = ['Date',", "self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter", "values for drop down boxes on both pages # Reverse", "page 'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue':", "from datetime import timedelta from collections import 
OrderedDict from pandas.tseries.offsets", "drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers,", "= ['0.25', '0.5', '1.0', '1.25', '1.5', '2.0', 'bid/ask'] # For", "'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df', 'order': 'sparse_market_order_df'}, 'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'},", "trade_outliers_cols = ['Date', 'ticker', 'side', 'notional cur', 'benchmark', 'exec not',", "base64 import os ## Date/time components import pandas as pd", "import datetime from datetime import timedelta from collections import OrderedDict", "app=None, constants=None, url_prefix=''): super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix) available_dates = pd.date_range(", "Reverse date list (for both detailed and aggregated pages) self.available_dates", "date list (for both detailed and aggregated pages) self.available_dates =", "'no-print'), # Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines),", "notional (rep cur)'] # Main page for detailed analysing of", "id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders:", "download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders: Markout', 
id='order-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Orders: Histogram vs", "'order-candle-timeline': 'sparse_market_order_df'}, # Aggregated trader page 'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue':", "It creates two web pages - detailed_page - for doing", "datetime.datetime.today().date(), freq=BDay()) times = pd.date_range(\"0:00\", \"23:59\", freq=\"15min\") ### create the", "id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue',", "drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data),", "by Time', id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val',", "self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types", "self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric',", "CSV', downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ), 
self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto',", "'trade_df', 'order': 'order_df'}, 'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order':", "height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated", "to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance', drop_down_values=self.available_visualization)", "]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade outliers", "'Print PDF', id = 'detailed-print-pdf-button', className = 'no-print'), # Orders", "aggregated page only self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers", "Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: PDF fit ('", "= self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types =", "boxes html.Div([ self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates, 'start-time-val' : self.available_times},", "Dates', id='aggregated-date-val', 
offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader: Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500),", "{'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order':", "doing detailed tcapy analysis for a specific currency pair -", "for more aggregated style analysis across multiple currency pairs and", "= self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types = self._constants.available_event_types self.available_metrics = self._constants.available_metrics self.available_reload =", "self._constants.available_algos_dictionary['All'] self.available_market_data = self._constants.available_market_data self.available_order_plot_lines = ['candlestick', 'mid', 'bid', 'ask',", "self._constants.available_tickers_dictionary['All'] self.available_venues = self._constants.available_venues_dictionary['All'] self.available_brokers = self._constants.available_brokers_dictionary['All'] self.available_algos = self._constants.available_algos_dictionary['All']", "= [x.date() for x in available_dates[::-1]] # For detailed page", "prefix_id='aggregated', drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'), # , msg_id='aggregated-status'),", "id='ticker-val', prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo',", "trade'] self.available_execution_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'buy trade', 'sell", 
"- detailed_page - for doing detailed tcapy analysis for a", "downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download", "for t in times] self.available_tickers = self._constants.available_tickers_dictionary['All'] self.available_venues = self._constants.available_venues_dictionary['All']", "page 'compliance_metric_table_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, 'compliance_download_link_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker':", "prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Finish", "id='visualization-val', prefix_id='compliance', drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'), #", "and aggregated pages) self.available_dates = [x.date() for x in available_dates[::-1]]", "'bid', 'ask', 'arrival', 'twap', 'vwap', 'buy trade', 'sell trade'] self.available_execution_plot_lines", "self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types = self._constants.available_event_types self.available_metrics", "as pd import datetime from datetime import timedelta from collections", "tcapy markout table for executions html.Div([ html.H3('Executions: Markout Table'), 
html.Div(id='detailed-execution-table')", "Markout Table'), html.Div(id='detailed-execution-table') ], style={'width': '1000px', 'display': 'inline-block', 'marginBottom': 5,", "dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish", "self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates, 'start-time-val' : self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish", "CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', height=500), self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Executions:", "multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated', drop_down_values=self.available_reload),", "drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated',", "self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar() ],", "'1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ # Secondary page for", "(' + self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500),", "of time, eg. who is the best broker? 
self.pages['aggregated'] =", "# dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates),", "height=500), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################", "Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='detailed-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown", "freq=\"15min\") ### create the possible values for drop down boxes", "Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download CSV',", "downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft':", "self.available_tickers = self._constants.available_tickers_dictionary['All'] self.available_venues = self._constants.available_venues_dictionary['All'] self.available_brokers = self._constants.available_brokers_dictionary['All'] self.available_algos", "_flatten_dictionary(self, dictionary): available = dictionary['All'] available_groups = self._util_func.dict_key_list(dictionary.keys()) return self.flatten_list_of_strings([available_groups,", "= html.Div([ dcc.Location(id='url', refresh=False), html.Div(id='page-content') ]) link_bar_dict = {'Detailed' :", "drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance', 
drop_down_values=self.available_grouped_tickers,", "Secondary page for analysing aggregated statistics over long periods of", "dash_core_components as dcc import dash_html_components as html import base64 import", "# <NAME> / <EMAIL> # # Copyright 2017 Cuemacro Ltd.", "## Web server components import dash_core_components as dcc import dash_html_components", "is the best broker? self.pages['aggregated'] = html.Div([ self._sc.header_bar('FX: Aggregated -", "prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500), self._sc.horizontal_bar(),", "for x in available_dates[::-1]] # For detailed page only self.available_times", "self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline',", "class, to create the web based GUI for the tcapy", "self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link',", "self.available_venues = self._constants.available_venues_dictionary['All'] self.available_brokers = self._constants.available_brokers_dictionary['All'] self.available_algos = self._constants.available_algos_dictionary['All'] self.available_market_data", "constants=None, url_prefix=''): super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix) available_dates = pd.date_range( datetime.datetime.today().date()", "two web pages - 
detailed_page - for doing detailed tcapy", "# self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader: Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'],", "who is the best broker? self.pages['aggregated'] = html.Div([ self._sc.header_bar('FX: Aggregated", "in times] self.available_tickers = self._constants.available_tickers_dictionary['All'] self.available_venues = self._constants.available_venues_dictionary['All'] self.available_brokers =", "broker notional (rep cur)'] # Main page for detailed analysing", "self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade outliers CSV',", "self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos", "import division, print_function __author__ = 'saeedamen' # <NAME> / <EMAIL>", "table for executions html.Div([ html.H3('Executions: Markout Table'), html.Div(id='detailed-execution-table') ], style={'width':", "{'Detailed' : 'detailed', 'Aggregated' : 'aggregated', 'Compliance' : 'compliance'} trade_outliers_cols", "id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload',", "tcapy.vis.layoutdash import LayoutDash ######################################################################################################################## class LayoutDashImplGen(LayoutDash): \"\"\"This 
implements the LayoutDash", "(for both detailed and aggregated pages) self.available_dates = [x.date() for", "html.Div([ self._sc.header_bar('FX: Aggregated - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\",", "Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(), # dropdown", "and limitations under the License. # ## Web server components", "html.Div([ self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates, 'start-time-val' : self.available_times}, prefix_id='detailed'),", "Aggregated - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='aggregated-status'), margin_left=5),", "['Date', 'by broker notional (rep cur)'] # Main page for", "id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'), # ,", "5}), ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ #", "analysis across multiple currency pairs and over multiple time periods", "'exec not', 'exec not in rep cur', 'slippage'] broker_cols =", "based GUI for the tcapy application. 
It creates two web", "drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers,", "self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time of Day',", "Broker', id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download broker CSV', downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ),", "id = 'detailed-print-pdf-button', className = 'no-print'), # Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders:", "abstract class, to create the web based GUI for the", "across multiple currency pairs and over multiple time periods \"\"\"", "PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500), # Detailed tcapy markout table", "\"\"\"This implements the LayoutDash abstract class, to create the web", "a specific currency pair - aggregated_page - for more aggregated", "id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val',", "multiple currency pairs and over multiple time periods \"\"\" def", "server components import dash_core_components as dcc import dash_html_components as html", "def __init__(self, app=None, constants=None, url_prefix=''): super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix) available_dates", "id='detailed-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start 
Date',", "+ ' notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar() ], style={'width':", "available_dates[::-1]] # For detailed page only self.available_times = [t.strftime(\"%H:%M\") for", "self.available_algos = self._constants.available_algos_dictionary['All'] self.available_market_data = self._constants.available_market_data self.available_order_plot_lines = ['candlestick', 'mid',", "self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='compliance-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes", "id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated',", "implements the LayoutDash abstract class, to create the web based", "= 'no-print'), # Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot',", "the License. 
# ## Web server components import dash_core_components as", "'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, # Compliance page 'compliance_metric_table_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'},", "__future__ import division, print_function __author__ = 'saeedamen' # <NAME> /", "for the specific language governing permissions and limitations under the", "'mid', 'bid', 'ask', 'buy trade', 'sell trade'] self.available_slippage_bounds = ['0.25',", "html.Div([ self._sc.header_bar('FX: Compliance Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='compliance-status'), margin_left=5),", "Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='detailed-status'), margin_left=5), self._sc.horizontal_bar(), #", "'twap', 'vwap', 'buy trade', 'sell trade'] self.available_execution_plot_lines = ['candlestick', 'mid',", "id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val',", "self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates),", "drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics)", "times] self.available_tickers = self._constants.available_tickers_dictionary['All'] 
self.available_venues = self._constants.available_venues_dictionary['All'] self.available_brokers = self._constants.available_brokers_dictionary['All']", "margin_left=5), self._sc.horizontal_bar(), # dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val',", "drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated',", "html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated',", "Trader: PDF fit (' + self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot',", "= [t.strftime(\"%H:%M\") for t in times] self.available_tickers = self._constants.available_tickers_dictionary['All'] self.available_venues", "html.Div([ html.H3('Executions: Markout Table'), html.Div(id='detailed-execution-table') ], style={'width': '1000px', 'display': 'inline-block',", "style={'width': '1000px', 'display': 'inline-block', 'marginBottom': 5, 'marginTop': 5, 'marginLeft': 5,", "the course of a few days) self.pages['detailed'] = html.Div([ self._sc.header_bar('FX:", "\"\"\" def __init__(self, app=None, constants=None, url_prefix=''): super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix)", "self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True),", "as html import base64 import os ## Date/time components import", "the License for the specific language governing permissions and 
limitations", "- Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(),", "prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button',", "super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix) available_dates = pd.date_range( datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window),", "id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto',", "both detailed and aggregated pages) self.available_dates = [x.date() for x", "# Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates,", "id='calculation-button', prefix_id='compliance'), # self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'), # self.date_picker(caption='Finish Date',", "# For detailed page only self.available_times = [t.strftime(\"%H:%M\") for t", "= ['Date', 'by broker notional (rep cur)'] # Main page", "'display': 'inline-block', 'marginBottom': 5, 'marginTop': 5, 'marginLeft': 5, 'marginRight': 5}),", "'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500),", "id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed', drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', 
id='venue-val', prefix_id='detailed',", "self._sc.width_row_cell(html.B(\"Status: ok\", id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(), # dropdown selection boxes html.Div([", "], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ self.pages['compliance'] =", "'sell trade'] self.available_execution_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'buy trade',", "'yes'] self.available_visualization = ['yes', 'no'] self.construct_layout() def _flatten_dictionary(self, dictionary): available", "self._sc.plot(caption='Executions: Histogram vs PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500), # Detailed", "'start-time-val' : self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val', self.available_times)]),", "on both pages # Reverse date list (for both detailed", "= self._constants.available_tickers_dictionary['All'] self.available_venues = self._constants.available_venues_dictionary['All'] self.available_brokers = self._constants.available_brokers_dictionary['All'] self.available_algos =", "= {'Detailed' : 'detailed', 'Aggregated' : 'aggregated', 'Compliance' : 'compliance'}", "time, eg. who is the best broker? 
self.pages['aggregated'] = html.Div([", "Detailed trader page # 'timeline_trade_orders' : {'client-orders': 'order', 'executions': 'trade'},", "multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos,", "aggregated_page - for more aggregated style analysis across multiple currency", "self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val',", "'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ self.pages['compliance'] = html.Div([ self._sc.header_bar('FX: Compliance", "{'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, # Compliance page 'compliance_metric_table_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all',", "self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True),", "self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader:", "'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline': 
'sparse_market_order_df'}, # Aggregated", "self._sc.plot(caption='Aggregated Trader: Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader:", "html.Div(id='detailed-execution-table') ], style={'width': '1000px', 'display': 'inline-block', 'marginBottom': 5, 'marginTop': 5,", "the LayoutDash abstract class, to create the web based GUI", "self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary)", "create the web based GUI for the tcapy application. It", "Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates, 'start-time-val'", "### create the possible values for drop down boxes on", "id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data',", "time periods \"\"\" def __init__(self, app=None, constants=None, url_prefix=''): super(LayoutDashImplGen, self).__init__(app=app,", "= ['Date', 'ticker', 'side', 'notional cur', 'benchmark', 'exec not', 'exec", "available_dates = pd.date_range( datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(), freq=BDay()) times =", "= ['yes', 'no'] self.construct_layout() def _flatten_dictionary(self, dictionary): available = dictionary['All']", "drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', 
id='calculation-button', prefix_id='aggregated'), # , msg_id='aggregated-status'), self._sc.horizontal_bar(),", "# Detailed tcapy markout table for executions html.Div([ html.H3('Executions: Markout", "'trade'}, # 'markout_trade_orders' : {'client-orders': 'order_df', 'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution':", "Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='compliance-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown", "web pages - detailed_page - for doing detailed tcapy analysis", "multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos,", "specific currency pair - aggregated_page - for more aggregated style", "self._constants.available_brokers_dictionary['All'] self.available_algos = self._constants.available_algos_dictionary['All'] self.available_market_data = self._constants.available_market_data self.available_order_plot_lines = ['candlestick',", "self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics) ]),", "5, 'marginTop': 5, 'marginLeft': 5, 'marginRight': 5}), ], style={'width': '1000px',", "'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ # Secondary page for analysing", "by Broker', id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download broker CSV', downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv'", "application. 
It creates two web pages - detailed_page - for", "pd.date_range(\"0:00\", \"23:59\", freq=\"15min\") ### create the possible values for drop", "Time of Day', id='finish-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to Mid (bp)',", "Time', id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val', prefix_id='compliance',", "]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'), # self.button(caption = 'Print PDF',", "self.available_dates, 'start-time-val' : self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val',", "self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types = self._constants.available_event_types self.available_metrics = self._constants.available_metrics", "# Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates),", "html import base64 import os ## Date/time components import pandas", "pd.date_range( datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(), freq=BDay()) times = pd.date_range(\"0:00\", \"23:59\",", "periods \"\"\" def __init__(self, app=None, constants=None, url_prefix=''): super(LayoutDashImplGen, self).__init__(app=app, constants=constants,", "<NAME> / <EMAIL> # # Copyright 2017 Cuemacro Ltd. -", "best broker? 
self.pages['aggregated'] = html.Div([ self._sc.header_bar('FX: Aggregated - Trader Analysis',", "multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues,", "PDF', id = 'detailed-print-pdf-button', className = 'no-print'), # Orders self._sc.horizontal_bar(),", "2017 Cuemacro Ltd. - http//www.cuemacro.com / @cuemacro # # See", "self._sc.plot(caption='Aggregated Trader: PDF fit (' + self._constants.reporting_currency + ' notional)',", "(rep cur)'] # Main page for detailed analysing of (eg.", "few days) self.pages['detailed'] = html.Div([ self._sc.header_bar('FX: Detailed - Trader Analysis',", "id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by", "'1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ self.pages['compliance'] = html.Div([ self._sc.header_bar('FX:", "Aggregated trader page 'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker':", "self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val', self.available_times)]), prefix_id='detailed'), self._sc.drop_down(caption='Ticker',", "page for analysing aggregated statistics over long periods of time,", "detailed and aggregated pages) self.available_dates = [x.date() for x in", "'order': 'sparse_market_order_df'}, 
'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'}, 'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'},", "self.available_dates = [x.date() for x in available_dates[::-1]] # For detailed", "pandas as pd import datetime from datetime import timedelta from", "trader page # 'timeline_trade_orders' : {'client-orders': 'order', 'executions': 'trade'}, #", "], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) # ID flags", "import timedelta from collections import OrderedDict from pandas.tseries.offsets import *", "self.pages['aggregated'] = html.Div([ self._sc.header_bar('FX: Aggregated - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict),", "# Compliance page 'compliance_metric_table_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, 'compliance_download_link_trade_order': {'execution-by-anomalous':", "self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'), # self.button(caption = 'Print PDF', id", "# Reverse date list (for both detailed and aggregated pages)", "'inline-block', 'marginBottom': 5, 'marginTop': 5, 'marginLeft': 5, 'marginRight': 5}), ],", "self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader: Summary', id=['execution-by-ticker-bar-plot',", "self.available_reload = ['no', 'yes'] self.available_visualization = ['yes', 'no'] self.construct_layout() def", "Detailed - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='detailed-status'), margin_left=5),", "id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade outliers CSV', 
downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals", "available = dictionary['All'] available_groups = self._util_func.dict_key_list(dictionary.keys()) return self.flatten_list_of_strings([available_groups, available]) def", "self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types = self._constants.available_event_types self.available_metrics = self._constants.available_metrics self.available_reload", "= self._constants.available_market_data self.available_order_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'arrival', 'twap',", "['no', 'yes'] self.available_visualization = ['yes', 'no'] self.construct_layout() def _flatten_dictionary(self, dictionary):", "{ # Detailed trader page # 'timeline_trade_orders' : {'client-orders': 'order',", "dcc import dash_html_components as html import base64 import os ##", "broker? 
self.pages['aggregated'] = html.Div([ self._sc.header_bar('FX: Aggregated - Trader Analysis', img='logo.png'),", ", msg_id='aggregated-status'), self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader:", "'2.0', 'bid/ask'] # For aggregated page only self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary)", "+ self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar()", "= ['candlestick', 'mid', 'bid', 'ask', 'buy trade', 'sell trade'] self.available_slippage_bounds", "self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link',", "id='compliance-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date',", "'marginRight': 'auto', 'marginLeft': 'auto'}) # ID flags self.id_flags = {", "id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val',", "prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val',", "'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue': 
'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'},", "'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, # Compliance page", "prefix_id='detailed', drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'), # self.button(caption =", "License for the specific language governing permissions and limitations under", "prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed',", "= ['candlestick', 'mid', 'bid', 'ask', 'arrival', 'twap', 'vwap', 'buy trade',", "'ticker', 'side', 'notional cur', 'benchmark', 'exec not', 'exec not in", "prefix_id='compliance', drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'), # self.date_picker(caption='Start", "self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='detailed-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes", "pages # Reverse date list (for both detailed and aggregated", "id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'), # self.button(caption", "trade', 'sell trade'] self.available_slippage_bounds = ['0.25', '0.5', '1.0', '1.25', '1.5',", "height=500), # Detailed tcapy markout table for executions html.Div([ 
html.H3('Executions:", "id='order-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Orders: Histogram vs PDF fit', id='order-dist-plot', prefix_id='detailed',", "prefix_id='detailed'), self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val', self.available_times)]), prefix_id='detailed'), self._sc.drop_down(caption='Ticker', id='ticker-val',", "Date', id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker',", "id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed',", "outliers CSV', downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table', prefix_id='compliance',", "Date/time components import pandas as pd import datetime from datetime", "id='reload-val', prefix_id='aggregated', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type',", "over multiple time periods \"\"\" def __init__(self, app=None, constants=None, url_prefix=''):", "downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', height=500), self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Executions: Histogram", "components import pandas as pd import datetime 
from datetime import", "Date', id={'start-date-val' : self.available_dates, 'start-time-val' : self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish Date',", "'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'}, 'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution':", "drop down boxes on both pages # Reverse date list", "Mid (bp)', id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance', drop_down_values=self.available_visualization) ]),", "course of a few days) self.pages['detailed'] = html.Div([ self._sc.header_bar('FX: Detailed", "self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader: Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated',", "self.available_slippage_bounds = ['0.25', '0.5', '1.0', '1.25', '1.5', '2.0', 'bid/ask'] #", "'vwap', 'buy trade', 'sell trade'] self.available_execution_plot_lines = ['candlestick', 'mid', 'bid',", "], style={'width': '1000px', 'display': 'inline-block', 'marginBottom': 5, 'marginTop': 5, 'marginLeft':", "Compliance Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='compliance-status'), margin_left=5), self._sc.horizontal_bar(), #", "# # See the License for the specific language governing", "id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate',", "periods of time, eg. who is the best broker? 
self.pages['aggregated']", "fit (' + self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated',", "detailed page only self.available_times = [t.strftime(\"%H:%M\") for t in times]", "]) link_bar_dict = {'Detailed' : 'detailed', 'Aggregated' : 'aggregated', 'Compliance'", "# , msg_id='aggregated-status'), self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]), self._sc.plot(caption='Aggregated", "to create the web based GUI for the tcapy application.", "self.available_dates), ('finish-time-val', self.available_times)]), prefix_id='detailed'), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val',", "trader page 'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker',", "constants=constants, url_prefix=url_prefix) available_dates = pd.date_range( datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(), freq=BDay())", "drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance', drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate', id='calculation-button',", "http//www.cuemacro.com / @cuemacro # # See the License for the", "selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates, 'start-time-val' :", "Date', id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val', self.available_times)]), prefix_id='detailed'), 
self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers),", "self._sc.header_bar('FX: Detailed - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='detailed-status'),", "import base64 import os ## Date/time components import pandas as", "'marginTop': 5, 'marginLeft': 5, 'marginRight': 5}), ], style={'width': '1000px', 'marginRight':", "height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: PDF fit (' + self._constants.reporting_currency +", "self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types = self._constants.available_event_types", "self.button(caption = 'Print PDF', id = 'detailed-print-pdf-button', className = 'no-print'),", "self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True),", "self.flatten_list_of_strings([available_groups, available]) def construct_layout(self): self.page_content = html.Div([ dcc.Location(id='url', refresh=False), html.Div(id='page-content')", "downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed', height=500),", "self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed', 
drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', id='venue-val',", "markout table for executions html.Div([ html.H3('Executions: Markout Table'), html.Div(id='detailed-execution-table') ],", "import dash_html_components as html import base64 import os ## Date/time", "drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated',", "self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'), ]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table',", "id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', height=500), self._sc.plot(caption='Executions:", "division, print_function __author__ = 'saeedamen' # <NAME> / <EMAIL> #", "LayoutDash abstract class, to create the web based GUI for", "- Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='detailed-status'), margin_left=5), self._sc.horizontal_bar(),", "'sell trade'] self.available_slippage_bounds = ['0.25', '0.5', '1.0', '1.25', '1.5', '2.0',", "prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val',", "downloadplot_tag='order-candle-timeline-download-link', 
download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Orders: Histogram", "the web based GUI for the tcapy application. It creates", "'marginLeft': 5, 'marginRight': 5}), ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft':", "over long periods of time, eg. who is the best", "id='execution-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Executions: Histogram vs PDF fit', id='execution-dist-plot', prefix_id='detailed',", "= html.Div([ self._sc.header_bar('FX: Compliance Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='compliance-status'),", "downloadplot_caption='Download broker CSV', downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ), self._sc.horizontal_bar() ], style={'width': '1000px',", "/ @cuemacro # # See the License for the specific", "import OrderedDict from pandas.tseries.offsets import * from tcapy.vis.layoutdash import LayoutDash", "id='calculation-button', prefix_id='detailed'), # self.button(caption = 'Print PDF', id = 'detailed-print-pdf-button',", "Compliance page 'compliance_metric_table_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, 'compliance_download_link_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all',", "style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ # Secondary page", "= 'detailed-print-pdf-button', className = 'no-print'), # Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline',", "= self._constants.available_algos_dictionary['All'] self.available_market_data = 
self._constants.available_market_data self.available_order_plot_lines = ['candlestick', 'mid', 'bid',", "\"23:59\", freq=\"15min\") ### create the possible values for drop down", "prefix_id='detailed', height=500), self._sc.plot(caption='Executions: Histogram vs PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500),", "self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True),", "{'client-orders': 'order_df', 'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df', 'order': 'sparse_market_order_df'}, 'detailed_markout_trade_order':", "prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download broker CSV', downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ), self._sc.horizontal_bar() ],", "self.available_market_data = self._constants.available_market_data self.available_order_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'arrival',", "prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by Time',", "pd import datetime from datetime import timedelta from collections import", "html.Div([ self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'), # self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'), #", "prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization',", "@cuemacro # # See 
the License for the specific language", "'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, # Compliance", "'bid', 'ask', 'buy trade', 'sell trade'] self.available_slippage_bounds = ['0.25', '0.5',", "prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage", "trade', 'sell trade'] self.available_execution_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'buy", "selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date',", "in rep cur', 'slippage'] broker_cols = ['Date', 'by broker notional", "id='finish-date-dtpicker', prefix_id='compliance'), ]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols,", ": 'aggregated', 'Compliance' : 'compliance'} trade_outliers_cols = ['Date', 'ticker', 'side',", "drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to", "of Day', id='finish-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val',", "self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: PDF fit (' + self._constants.reporting_currency + '", "self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance', 
drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'),", "'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue':", "self.available_event_types = self._constants.available_event_types self.available_metrics = self._constants.available_metrics self.available_reload = ['no', 'yes']", "list (for both detailed and aggregated pages) self.available_dates = [x.date()", "class LayoutDashImplGen(LayoutDash): \"\"\"This implements the LayoutDash abstract class, to create", "'1.0', '1.25', '1.5', '2.0', 'bid/ask'] # For aggregated page only", "analysing aggregated statistics over long periods of time, eg. who", ": self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val', self.available_times)]), prefix_id='detailed'),", "'order_df'}, 'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order':", "'buy trade', 'sell trade'] self.available_slippage_bounds = ['0.25', '0.5', '1.0', '1.25',", "import os ## Date/time components import pandas as pd import", "height=500), # Execution trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot',", "id='market-data-val', prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type', id='event-type-val', 
prefix_id='aggregated', drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val',", "img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='detailed-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection", "prefix_id='aggregated', height=500), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'})", "self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True),", "aggregated style analysis across multiple currency pairs and over multiple", "# Aggregated trader page 'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order':", "page only self.available_times = [t.strftime(\"%H:%M\") for t in times] self.available_tickers", "Type', id='event-type-val', prefix_id='aggregated', drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(),", "available]) def construct_layout(self): self.page_content = html.Div([ dcc.Location(id='url', refresh=False), html.Div(id='page-content') ])", "self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event", "id='start-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val', 
prefix_id='compliance', drop_down_values=self.available_times),", "self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id={'start-date-val' :", "= pd.date_range( datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(), freq=BDay()) times = pd.date_range(\"0:00\",", "prefix_id='detailed', height=500), # Execution trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed',", "ok\", id='compliance-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start", "# self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'), # self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'),", "Outliers', id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade outliers CSV', downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance:", "only self.available_times = [t.strftime(\"%H:%M\") for t in times] self.available_tickers =", "prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time", "permissions and limitations under the License. 
# ## Web server", "prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val',", "'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline':", "web based GUI for the tcapy application. It creates two", "datetime from datetime import timedelta from collections import OrderedDict from", "def _flatten_dictionary(self, dictionary): available = dictionary['All'] available_groups = self._util_func.dict_key_list(dictionary.keys()) return", "'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline':", "# Copyright 2017 Cuemacro Ltd. - http//www.cuemacro.com / @cuemacro #", "cur', 'benchmark', 'exec not', 'exec not in rep cur', 'slippage']", "self._constants.available_event_types self.available_metrics = self._constants.available_metrics self.available_reload = ['no', 'yes'] self.available_visualization =", "# # Copyright 2017 Cuemacro Ltd. 
- http//www.cuemacro.com / @cuemacro", "id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(), # dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date',", "id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: PDF fit (' +", "msg_id='aggregated-status'), self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader: Summary',", "'executions': 'trade'}, # 'markout_trade_orders' : {'client-orders': 'order_df', 'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order':", "= 'saeedamen' # <NAME> / <EMAIL> # # Copyright 2017", "'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline': 'sparse_market_order_df'}, # Aggregated trader page 'aggregated_bar_trade_order':", "url_prefix=url_prefix) available_dates = pd.date_range( datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(), freq=BDay()) times", "prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val',", "{'client-orders': 'order', 'executions': 'trade'}, # 'markout_trade_orders' : {'client-orders': 'order_df', 'executions':", "'sparse_market_trade_df', 'order': 'sparse_market_order_df'}, 'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'}, 'detailed_table_trade_order': {'execution':", "tcapy application. 
It creates two web pages - detailed_page -", "self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated', drop_down_values=self.available_event_types),", "'saeedamen' # <NAME> / <EMAIL> # # Copyright 2017 Cuemacro", "the best broker? self.pages['aggregated'] = html.Div([ self._sc.header_bar('FX: Aggregated - Trader", "self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed', drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data',", "element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders: Markout', id='order-markout-plot',", "offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader: Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated", "prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance', drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate',", "'sparse_market_trade_df', 'order-candle-timeline': 'sparse_market_order_df'}, # Aggregated trader page 'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker',", "freq=BDay()) times = pd.date_range(\"0:00\", \"23:59\", freq=\"15min\") ### create the possible", "for doing detailed tcapy analysis for a specific currency pair", "for a 
specific currency pair - aggregated_page - for more", "drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance',", "= self._constants.available_metrics self.available_reload = ['no', 'yes'] self.available_visualization = ['yes', 'no']", "'Aggregated' : 'aggregated', 'Compliance' : 'compliance'} trade_outliers_cols = ['Date', 'ticker',", "html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance',", "Web server components import dash_core_components as dcc import dash_html_components as", "Trader: Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: Timeline',", "columns=trade_outliers_cols, downloadplot_caption='Trade outliers CSV', downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals by Broker',", "language governing permissions and limitations under the License. 
# ##", "self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time of", "{'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, 'compliance_download_link_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, }", "drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'),", "self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to Mid", "self.available_visualization = ['yes', 'no'] self.construct_layout() def _flatten_dictionary(self, dictionary): available =", "Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot',", "id='finish-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds),", "detailed_page - for doing detailed tcapy analysis for a specific", "timedelta from collections import OrderedDict from pandas.tseries.offsets import * from", "'benchmark', 'exec not', 'exec not in rep cur', 'slippage'] broker_cols", "5, 'marginLeft': 5, 'marginRight': 5}), ], style={'width': '1000px', 'marginRight': 'auto',", "For detailed page only 
self.available_times = [t.strftime(\"%H:%M\") for t in", "'1000px', 'display': 'inline-block', 'marginBottom': 5, 'marginTop': 5, 'marginLeft': 5, 'marginRight':", "drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time", "for analysing aggregated statistics over long periods of time, eg.", "prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated',", "'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, # Compliance page 'compliance_metric_table_trade_order':", "- for doing detailed tcapy analysis for a specific currency", "'auto'}) ################################################################################################################ self.pages['compliance'] = html.Div([ self._sc.header_bar('FX: Compliance Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict),", "the tcapy application. 
It creates two web pages - detailed_page", "Date', id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker',", "drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated', drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics),", "self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market", "{'execution': 'trade_df', 'order': 'order_df'}, 'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side',", "id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo',", "# For aggregated page only self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues =", "= self._constants.available_event_types self.available_metrics = self._constants.available_metrics self.available_reload = ['no', 'yes'] self.available_visualization", "'marginLeft': 'auto'}) ################################################################################################################ # Secondary page for analysing aggregated statistics", "= self._constants.available_venues_dictionary['All'] 
self.available_brokers = self._constants.available_brokers_dictionary['All'] self.available_algos = self._constants.available_algos_dictionary['All'] self.available_market_data =", "['candlestick', 'mid', 'bid', 'ask', 'buy trade', 'sell trade'] self.available_slippage_bounds =", "<EMAIL> # # Copyright 2017 Cuemacro Ltd. - http//www.cuemacro.com /", "'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df', 'order': 'sparse_market_order_df'}, 'detailed_markout_trade_order': {'execution': 'trade_df', 'order':", "analysis for a specific currency pair - aggregated_page - for", "height=500), self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Orders: Histogram vs PDF", "(eg. over the course of a few days) self.pages['detailed'] =", "prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance',", "id='calculation-button', prefix_id='aggregated'), # , msg_id='aggregated-status'), self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val',", "# ## Web server components import dash_core_components as dcc import", "self._constants.available_market_data self.available_order_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'arrival', 'twap', 'vwap',", "= self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos =", "OrderedDict from pandas.tseries.offsets import * from tcapy.vis.layoutdash import LayoutDash 
########################################################################################################################", "LayoutDashImplGen(LayoutDash): \"\"\"This implements the LayoutDash abstract class, to create the", "self._sc.header_bar('FX: Aggregated - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='aggregated-status'),", "prefix_id='aggregated', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type', id='event-type-val',", "style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) # ID flags self.id_flags", "'order', 'executions': 'trade'}, # 'markout_trade_orders' : {'client-orders': 'order_df', 'executions': 'trade_df'},", "Execution trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download", "prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', height=500), self._sc.plot(caption='Executions: Markout',", "drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'), # self.date_picker(caption='Start Date',", "self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(), # dropdown selection boxes", "t in times] self.available_tickers = 
self._constants.available_tickers_dictionary['All'] self.available_venues = self._constants.available_venues_dictionary['All'] self.available_brokers", "'compliance_metric_table_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, 'compliance_download_link_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'},", "'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'},", "'1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) # ID flags self.id_flags =", "self.available_metrics = self._constants.available_metrics self.available_reload = ['no', 'yes'] self.available_visualization = ['yes',", "Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(), #", "self._constants.available_metrics self.available_reload = ['no', 'yes'] self.available_visualization = ['yes', 'no'] self.construct_layout()", "create the possible values for drop down boxes on both", "under the License. 
# ## Web server components import dash_core_components", "days) self.pages['detailed'] = html.Div([ self._sc.header_bar('FX: Detailed - Trader Analysis', img='logo.png'),", "self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv',", "aggregated pages) self.available_dates = [x.date() for x in available_dates[::-1]] #", "self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download broker CSV',", "# Detailed trader page # 'timeline_trade_orders' : {'client-orders': 'order', 'executions':", "trade'] self.available_slippage_bounds = ['0.25', '0.5', '1.0', '1.25', '1.5', '2.0', 'bid/ask']", "prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val',", "Copyright 2017 Cuemacro Ltd. 
- http//www.cuemacro.com / @cuemacro # #", "ID flags self.id_flags = { # Detailed trader page #", "prefix_id='aggregated'), # , msg_id='aggregated-status'), self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]),", "over the course of a few days) self.pages['detailed'] = html.Div([", "self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'), # , msg_id='aggregated-status'), self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish", "PDF fit', id='order-dist-plot', prefix_id='detailed', height=500), # Execution trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions:", ": 'detailed', 'Aggregated' : 'aggregated', 'Compliance' : 'compliance'} trade_outliers_cols =", "self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: PDF", "limitations under the License. # ## Web server components import", "the specific language governing permissions and limitations under the License.", "- aggregated_page - for more aggregated style analysis across multiple", "aggregated statistics over long periods of time, eg. 
who is", "import pandas as pd import datetime from datetime import timedelta", "drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance',", "'sparse_market_order_df'}, # Aggregated trader page 'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'},", "x in available_dates[::-1]] # For detailed page only self.available_times =", "fit', id='execution-dist-plot', prefix_id='detailed', height=500), # Detailed tcapy markout table for", "'no'] self.construct_layout() def _flatten_dictionary(self, dictionary): available = dictionary['All'] available_groups =", "'timeline_trade_orders' : {'client-orders': 'order', 'executions': 'trade'}, # 'markout_trade_orders' : {'client-orders':", "id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val',", "self.available_execution_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', height=500), self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed',", "'auto', 'marginLeft': 'auto'}) ################################################################################################################ # Secondary page for analysing aggregated", "detailed analysing of (eg. 
over the course of a few", "id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data',", "'detailed-print-pdf-button', className = 'no-print'), # Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot',", "self.construct_layout() def _flatten_dictionary(self, dictionary): available = dictionary['All'] available_groups = self._util_func.dict_key_list(dictionary.keys())", "self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ self.pages['compliance']", "- http//www.cuemacro.com / @cuemacro # # See the License for", "prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed', drop_down_values=self.available_grouped_algos),", "self._sc.header_bar('FX: Compliance Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B(\"Status: ok\", id='compliance-status'), margin_left=5), self._sc.horizontal_bar(),", "style analysis across multiple currency pairs and over multiple time", "prefix_id='detailed', height=500), self._sc.plot(caption='Orders: Histogram vs PDF fit', id='order-dist-plot', prefix_id='detailed', height=500),", "more aggregated style analysis across multiple currency pairs and over", "self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated', 
drop_down_values=self.available_reload), self._sc.drop_down(caption='Market", "fit', id='order-dist-plot', prefix_id='detailed', height=500), # Execution trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline',", "prefix_id='detailed', drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed',", "prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated',", "PDF fit (' + self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'],", "prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated', drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated',", "]), self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'), # self.date_picker(caption='Start Date', id='start-date-dtpicker',", "# Execution trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines),", "for detailed analysing of (eg. 
over the course of a", "import LayoutDash ######################################################################################################################## class LayoutDashImplGen(LayoutDash): \"\"\"This implements the LayoutDash abstract", "flags self.id_flags = { # Detailed trader page # 'timeline_trade_orders'", "'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, # Compliance page 'compliance_metric_table_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker':", "'aggregated', 'Compliance' : 'compliance'} trade_outliers_cols = ['Date', 'ticker', 'side', 'notional", "################################################################################################################ # Secondary page for analysing aggregated statistics over long", "= self._util_func.dict_key_list(dictionary.keys()) return self.flatten_list_of_strings([available_groups, available]) def construct_layout(self): self.page_content = html.Div([", "self._sc.horizontal_bar(), # dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated',", "drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed', drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market", "of Day', id='start-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val',", "Histogram vs PDF fit', id='order-dist-plot', prefix_id='detailed', height=500), # Execution trades", "* from tcapy.vis.layoutdash import LayoutDash ######################################################################################################################## class LayoutDashImplGen(LayoutDash): 
\"\"\"This implements", "drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated',", "prefix_id='compliance'), # self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'), ]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade", "'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline': 'sparse_market_order_df'},", "not', 'exec not in rep cur', 'slippage'] broker_cols = ['Date',", "multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data),", "self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'), # , msg_id='aggregated-status'), self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish Dates',", "id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times),", "'auto', 'marginLeft': 'auto'}) ################################################################################################################ self.pages['compliance'] = html.Div([ self._sc.header_bar('FX: Compliance Analysis',", "for executions html.Div([ html.H3('Executions: Markout Table'), html.Div(id='detailed-execution-table') ], style={'width': '1000px',", "Day', id='start-time-of-day-val', prefix_id='compliance', 
drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val', prefix_id='compliance',", "and over multiple time periods \"\"\" def __init__(self, app=None, constants=None,", "'mid', 'bid', 'ask', 'arrival', 'twap', 'vwap', 'buy trade', 'sell trade']", "for drop down boxes on both pages # Reverse date", "pairs and over multiple time periods \"\"\" def __init__(self, app=None,", "'1.5', '2.0', 'bid/ask'] # For aggregated page only self.available_grouped_tickers =", "prefix_id='compliance', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val',", "# ID flags self.id_flags = { # Detailed trader page", "analysing of (eg. over the course of a few days)", "drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val',", "self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance',", "self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) # ID", "Main page for detailed analysing of (eg. 
over the course", "downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', height=500), self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed', height=500),", "'compliance'} trade_outliers_cols = ['Date', 'ticker', 'side', 'notional cur', 'benchmark', 'exec", "{'execution': 'sparse_market_trade_df', 'order': 'sparse_market_order_df'}, 'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'}, 'detailed_table_trade_order':", "html.Div(id='page-content') ]) link_bar_dict = {'Detailed' : 'detailed', 'Aggregated' : 'aggregated',", "the possible values for drop down boxes on both pages", "self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types = self._constants.available_event_types self.available_metrics = self._constants.available_metrics self.available_reload = ['no',", "= ['no', 'yes'] self.available_visualization = ['yes', 'no'] self.construct_layout() def _flatten_dictionary(self," ]
[ "import CaseData @pytest.fixture( scope='session', params=( lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome(", "1), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21'", "lattice_size=(2, 2, 1), optimizer=stk.PeriodicCollapser(), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C'", "functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), optimizer=stk.PeriodicCollapser(), ), ), smiles=(", "functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ),", "'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), lambda name:", "name=name, ), lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1',", "smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 
'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)'", "smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ),", "), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2]['", "molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+]('", "), lattice_size=(2, 2, 1), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C'", "'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206['", "building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ),", "name=name, ), ), ) def cof_periodic_kagome(request) 
-> CaseData: return request.param(", "params=( lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()],", "), lattice_size=(2, 2, 1), optimizer=stk.PeriodicCollapser(), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N='", "'[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]'", "optimizer=stk.PeriodicCollapser(), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21'", "stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2,", "2, 1), optimizer=stk.PeriodicCollapser(), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1'", "'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), optimizer=stk.PeriodicCollapser(),", 
"'2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), ), ) def cof_periodic_kagome(request)", "), lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()],", "'5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), lambda", "CaseData @pytest.fixture( scope='session', params=( lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=(", "), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), optimizer=stk.PeriodicCollapser(), ), ),", "...case_data import CaseData @pytest.fixture( scope='session', params=( lambda name: CaseData( molecule=stk.ConstructedMolecule(", "'2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), lambda name: CaseData( molecule=stk.ConstructedMolecule(", "'211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=(", "), ), ) def cof_periodic_kagome(request) -> CaseData: return request.param( f'{request.fixturename}{request.param_index}',", "']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), 
lambda name: CaseData(", "'+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C'", "functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]='", "CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=(", "), ), lattice_size=(2, 2, 1), optimizer=stk.PeriodicCollapser(), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]='", "scope='session', params=( lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1',", "lattice_size=(2, 2, 1), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1'", "'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), optimizer=stk.PeriodicCollapser(), ),", "'5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' 
'211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), ),", "'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), ), )", "topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1'", "smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1),", "), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N'", "), ) def cof_periodic_kagome(request) -> CaseData: return request.param( f'{request.fixturename}{request.param_index}', )", "@pytest.fixture( scope='session', params=( lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock(", "import stk from ...case_data import CaseData @pytest.fixture( scope='session', params=( lambda", "), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), ), ), smiles=(", "'5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' 
'[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N='", "'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ),", "stk from ...case_data import CaseData @pytest.fixture( scope='session', params=( lambda name:", "']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), ), ) def", "'211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), ), ) def cof_periodic_kagome(request) -> CaseData:", "from ...case_data import CaseData @pytest.fixture( scope='session', params=( lambda name: CaseData(", "pytest import stk from ...case_data import CaseData @pytest.fixture( scope='session', params=(", "'[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 
'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+'", "'[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ),", "'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), ), ),", "'[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome(", "), name=name, ), lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock(", "1), optimizer=stk.PeriodicCollapser(), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%'", "')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121'", "'20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 
'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%'", "), ), lattice_size=(2, 2, 1), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N='", "'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), ),", "2, 1), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%'", "stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()],", "lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ),", "), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2,", "), name=name, ), ), ) def cof_periodic_kagome(request) -> CaseData: return", "'[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), ), ) def cof_periodic_kagome(request) ->", "name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( 
smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock(", "import pytest import stk from ...case_data import CaseData @pytest.fixture( scope='session',", "'=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name," ]
[ "# coding=utf-8 # Copyright 2021 The OneFlow Authors. All rights", "torch logger = logging.getLogger(__name__) def convert_qkv_weight(cfg, value): \"\"\" Convert qkv.weight", "2.0 (the \"License\"); # you may not use this file", "value): \"\"\" Convert qkv.weight to be compatible with LiBai transformer", "key: key = key.replace(\"mlp.fc1\", \"mlp.dense_h_to_4h\") elif \"mlp.fc2\" in key: key", "= key.replace(\"attn.proj\", \"self_attention.dense\") elif \"norm2\" in key: key = key.replace(\"norm2\",", "key: # to global tensor key, val = filter_keys(key, value,", "\"attn.proj\" in key: key = key.replace(\"attn.proj\", \"self_attention.dense\") elif \"norm2\" in", "value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3) ) return qkv_bias", "val.detach().cpu().numpy() val = flow.tensor(val).to_global( sbp=flow.sbp.broadcast, placement=flow.placement(\"cuda\", ranks=[0]) ) new_parameters[key] =", "weights. Torch weight can be downloaded from the original repo:", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "2021 The OneFlow Authors. All rights reserved. # # Licensed", "key: key = key.replace(\"attn.proj\", \"self_attention.dense\") elif \"norm2\" in key: key", "key = key.replace(\"norm2\", \"post_attention_layernorm\") elif \"mlp.fc1\" in key: key =", "value: qkv.weight in the loaded checkpoint \"\"\" num_heads = cfg.model.num_heads", "value) elif \"attn.proj\" in key: key = key.replace(\"attn.proj\", \"self_attention.dense\") elif", "in the loaded checkpoint \"\"\" num_heads = cfg.model.num_heads hidden_size =", "filter_keys(key, value, cfg) val = val.detach().cpu().numpy() val = flow.tensor(val).to_global( sbp=flow.sbp.broadcast,", "key: value = convert_qkv_bias(cfg, value) elif \"attn.proj\" in key: key", "use this file except in compliance with the License. 
#", "for key, value in parameters.items(): if \"num_batches_tracked\" not in key:", "keys and values to match LiBai's MAE model \"\"\" if", "Convert qkv.weight to be compatible with LiBai transformer layer Args:", "reserved. # # Licensed under the Apache License, Version 2.0", "in key: key = key.replace(\"mlp.fc1\", \"mlp.dense_h_to_4h\") elif \"mlp.fc2\" in key:", "\"bias\" in key: value = convert_qkv_bias(cfg, value) elif \"attn.proj\" in", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "License. # You may obtain a copy of the License", "config file value: qkv.weight in the loaded checkpoint \"\"\" num_heads", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "with LiBai transformer layer Args: cfg: config file value: qkv.weight", "value, cfg): \"\"\" Filtering the state_dict keys and values to", ".view(hidden_size * 3, hidden_size) ) return qkv_weight def convert_qkv_bias(cfg, value):", "value = convert_qkv_weight(cfg, value) if \"bias\" in key: value =", ".permute(1, 0, 2, 3) .contiguous() .view(hidden_size * 3, hidden_size) )", "qkv_weight def convert_qkv_bias(cfg, value): \"\"\" Convert qkv.bias to be compatible", "from the original repo: https://github.com/facebookresearch/mae \"\"\" torch_dict = torch.load(path, map_location=\"cpu\")[\"model\"]", "Torch weight can be downloaded from the original repo: https://github.com/facebookresearch/mae", "torch_dict new_parameters = dict() for key, value in parameters.items(): if", "\"post_attention_layernorm\") elif \"mlp.fc1\" in key: key = key.replace(\"mlp.fc1\", \"mlp.dense_h_to_4h\") elif", "LiBai's MAE model \"\"\" if \"norm1\" in key: key =", "return key, value def load_torch_checkpoint(model, cfg, path=\"./mae_finetuned_vit_base.pth\", strict=False): \"\"\" Load", "in compliance with the License. 
# You may obtain a", "num_heads) qkv_weight = ( value.view([3, num_heads, head_size, hidden_size]) .permute(1, 0,", "software # distributed under the License is distributed on an", "value: qkv.bias in the loaded checkpoint \"\"\" num_heads = cfg.model.num_heads", "= flow.tensor(val).to_global( sbp=flow.sbp.broadcast, placement=flow.placement(\"cuda\", ranks=[0]) ) new_parameters[key] = val model.load_state_dict(new_parameters,", "cfg.model.num_heads hidden_size = cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_weight", ") return qkv_weight def convert_qkv_bias(cfg, value): \"\"\" Convert qkv.bias to", "key.replace(\"mlp.fc2\", \"mlp.dense_4h_to_h\") elif \"fc_norm\" in key: key = key.replace(\"fc_norm\", \"norm\")", "import torch logger = logging.getLogger(__name__) def convert_qkv_weight(cfg, value): \"\"\" Convert", "Load checkpoint from the given torch weights. Torch weight can", "\"mlp.dense_4h_to_h\") elif \"fc_norm\" in key: key = key.replace(\"fc_norm\", \"norm\") return", "= dict() for key, value in parameters.items(): if \"num_batches_tracked\" not", "qkv.bias in the loaded checkpoint \"\"\" num_heads = cfg.model.num_heads hidden_size", "ranks=[0]) ) new_parameters[key] = val model.load_state_dict(new_parameters, strict=strict) print(\"Successfully load torch", "def convert_qkv_weight(cfg, value): \"\"\" Convert qkv.weight to be compatible with", "\"attn.qkv\" in key: key = key.replace(\"attn.qkv\", \"self_attention.query_key_value\") if \"weight\" in", "with LiBai transformer layer Args: cfg: config file value: qkv.bias", "torch weights. 
Torch weight can be downloaded from the original", "value) if \"bias\" in key: value = convert_qkv_bias(cfg, value) elif", "= key.replace(\"norm1\", \"input_layernorm\") elif \"attn.qkv\" in key: key = key.replace(\"attn.qkv\",", "qkv_weight = ( value.view([3, num_heads, head_size, hidden_size]) .permute(1, 0, 2,", "in key: value = convert_qkv_weight(cfg, value) if \"bias\" in key:", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "def convert_qkv_bias(cfg, value): \"\"\" Convert qkv.bias to be compatible with", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "to be compatible with LiBai transformer layer Args: cfg: config", "and # limitations under the License. import logging import oneflow", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "0, 2).contiguous().view(hidden_size * 3) ) return qkv_bias def filter_keys(key, value,", "to in writing, software # distributed under the License is", "cfg: config file value: qkv.weight in the loaded checkpoint \"\"\"", "qkv.weight in the loaded checkpoint \"\"\" num_heads = cfg.model.num_heads hidden_size", "qkv_bias def filter_keys(key, value, cfg): \"\"\" Filtering the state_dict keys", "# See the License for the specific language governing permissions", "or agreed to in writing, software # distributed under the", "match LiBai's MAE model \"\"\" if \"norm1\" in key: key", "required by applicable law or agreed to in writing, software", "cfg) val = val.detach().cpu().numpy() val = flow.tensor(val).to_global( sbp=flow.sbp.broadcast, placement=flow.placement(\"cuda\", ranks=[0])", "3, hidden_size) ) return qkv_weight def convert_qkv_bias(cfg, value): \"\"\" Convert", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either", "OneFlow Authors. All rights reserved. # # Licensed under the", "with the License. # You may obtain a copy of", "downloaded from the original repo: https://github.com/facebookresearch/mae \"\"\" torch_dict = torch.load(path,", "= int(hidden_size / num_heads) qkv_bias = ( value.view(3, num_heads, head_size).permute(1,", "= key.replace(\"mlp.fc1\", \"mlp.dense_h_to_4h\") elif \"mlp.fc2\" in key: key = key.replace(\"mlp.fc2\",", "\"norm\") return key, value def load_torch_checkpoint(model, cfg, path=\"./mae_finetuned_vit_base.pth\", strict=False): \"\"\"", "given torch weights. Torch weight can be downloaded from the", "= torch_dict new_parameters = dict() for key, value in parameters.items():", "compliance with the License. # You may obtain a copy", "weight can be downloaded from the original repo: https://github.com/facebookresearch/mae \"\"\"", "agreed to in writing, software # distributed under the License", "distributed under the License is distributed on an \"AS IS\"", "oneflow as flow import torch logger = logging.getLogger(__name__) def convert_qkv_weight(cfg,", "elif \"norm2\" in key: key = key.replace(\"norm2\", \"post_attention_layernorm\") elif \"mlp.fc1\"", "qkv.bias to be compatible with LiBai transformer layer Args: cfg:", "layer Args: cfg: config file value: qkv.weight in the loaded", "express or implied. # See the License for the specific", "except in compliance with the License. 
# You may obtain", "= torch.load(path, map_location=\"cpu\")[\"model\"] parameters = torch_dict new_parameters = dict() for", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "\"input_layernorm\") elif \"attn.qkv\" in key: key = key.replace(\"attn.qkv\", \"self_attention.query_key_value\") if", "not use this file except in compliance with the License.", "writing, software # distributed under the License is distributed on", "transformer layer Args: cfg: config file value: qkv.weight in the", "can be downloaded from the original repo: https://github.com/facebookresearch/mae \"\"\" torch_dict", "/ num_heads) qkv_weight = ( value.view([3, num_heads, head_size, hidden_size]) .permute(1,", "you may not use this file except in compliance with", "torch.load(path, map_location=\"cpu\")[\"model\"] parameters = torch_dict new_parameters = dict() for key,", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "in key: key = key.replace(\"fc_norm\", \"norm\") return key, value def", "the given torch weights. Torch weight can be downloaded from", "2, 3) .contiguous() .view(hidden_size * 3, hidden_size) ) return qkv_weight", "in key: key = key.replace(\"attn.proj\", \"self_attention.dense\") elif \"norm2\" in key:", ".contiguous() .view(hidden_size * 3, hidden_size) ) return qkv_weight def convert_qkv_bias(cfg,", "loaded checkpoint \"\"\" num_heads = cfg.model.num_heads hidden_size = cfg.model.embed_dim head_size", "file value: qkv.weight in the loaded checkpoint \"\"\" num_heads =", "in key: value = convert_qkv_bias(cfg, value) elif \"attn.proj\" in key:", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "= int(hidden_size / num_heads) qkv_weight = ( value.view([3, num_heads, head_size,", "convert_qkv_weight(cfg, value) if \"bias\" in key: value = convert_qkv_bias(cfg, value)", "# to global tensor key, val = filter_keys(key, value, cfg)", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "hidden_size]) .permute(1, 0, 2, 3) .contiguous() .view(hidden_size * 3, hidden_size)", "\"mlp.fc1\" in key: key = key.replace(\"mlp.fc1\", \"mlp.dense_h_to_4h\") elif \"mlp.fc2\" in", "value = convert_qkv_bias(cfg, value) elif \"attn.proj\" in key: key =", "key = key.replace(\"mlp.fc2\", \"mlp.dense_4h_to_h\") elif \"fc_norm\" in key: key =", "key.replace(\"fc_norm\", \"norm\") return key, value def load_torch_checkpoint(model, cfg, path=\"./mae_finetuned_vit_base.pth\", strict=False):", "= convert_qkv_bias(cfg, value) elif \"attn.proj\" in key: key = key.replace(\"attn.proj\",", "head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3) ) return qkv_bias def filter_keys(key,", "2).contiguous().view(hidden_size * 3) ) return qkv_bias def filter_keys(key, value, cfg):", "key = key.replace(\"fc_norm\", \"norm\") return key, value def load_torch_checkpoint(model, cfg,", "= val model.load_state_dict(new_parameters, strict=strict) print(\"Successfully load torch mae checkpoint.\") return", "logging.getLogger(__name__) def convert_qkv_weight(cfg, value): \"\"\" Convert qkv.weight to be compatible", "values to match LiBai's MAE model \"\"\" if \"norm1\" in", "convert_qkv_bias(cfg, value) elif \"attn.proj\" in key: key = key.replace(\"attn.proj\", \"self_attention.dense\")", "val = filter_keys(key, value, cfg) val = val.detach().cpu().numpy() val =", "* 3, hidden_size) ) return qkv_weight def convert_qkv_bias(cfg, value): \"\"\"", "= key.replace(\"fc_norm\", \"norm\") return key, value def load_torch_checkpoint(model, cfg, path=\"./mae_finetuned_vit_base.pth\",", "\"\"\" torch_dict = torch.load(path, map_location=\"cpu\")[\"model\"] parameters = torch_dict 
new_parameters =", "OR CONDITIONS OF ANY KIND, either express or implied. #", "* 3) ) return qkv_bias def filter_keys(key, value, cfg): \"\"\"", "key: key = key.replace(\"fc_norm\", \"norm\") return key, value def load_torch_checkpoint(model,", "return qkv_bias def filter_keys(key, value, cfg): \"\"\" Filtering the state_dict", "\"\"\" Filtering the state_dict keys and values to match LiBai's", "the License is distributed on an \"AS IS\" BASIS, #", "hidden_size) ) return qkv_weight def convert_qkv_bias(cfg, value): \"\"\" Convert qkv.bias", "if \"bias\" in key: value = convert_qkv_bias(cfg, value) elif \"attn.proj\"", "logger = logging.getLogger(__name__) def convert_qkv_weight(cfg, value): \"\"\" Convert qkv.weight to", "logging import oneflow as flow import torch logger = logging.getLogger(__name__)", "cfg, path=\"./mae_finetuned_vit_base.pth\", strict=False): \"\"\" Load checkpoint from the given torch", "governing permissions and # limitations under the License. import logging", "= ( value.view([3, num_heads, head_size, hidden_size]) .permute(1, 0, 2, 3)", "checkpoint from the given torch weights. 
Torch weight can be", "dict() for key, value in parameters.items(): if \"num_batches_tracked\" not in", "cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_bias = ( value.view(3,", "key.replace(\"norm1\", \"input_layernorm\") elif \"attn.qkv\" in key: key = key.replace(\"attn.qkv\", \"self_attention.query_key_value\")", "elif \"attn.proj\" in key: key = key.replace(\"attn.proj\", \"self_attention.dense\") elif \"norm2\"", "law or agreed to in writing, software # distributed under", "the loaded checkpoint \"\"\" num_heads = cfg.model.num_heads hidden_size = cfg.model.embed_dim", "cfg): \"\"\" Filtering the state_dict keys and values to match", "\"\"\" if \"norm1\" in key: key = key.replace(\"norm1\", \"input_layernorm\") elif", "hidden_size = cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_weight =", "head_size = int(hidden_size / num_heads) qkv_bias = ( value.view(3, num_heads,", "3) .contiguous() .view(hidden_size * 3, hidden_size) ) return qkv_weight def", "tensor key, val = filter_keys(key, value, cfg) val = val.detach().cpu().numpy()", "key: key = key.replace(\"attn.qkv\", \"self_attention.query_key_value\") if \"weight\" in key: value", "= ( value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3) )", "int(hidden_size / num_heads) qkv_weight = ( value.view([3, num_heads, head_size, hidden_size])", "convert_qkv_bias(cfg, value): \"\"\" Convert qkv.bias to be compatible with LiBai", "val = flow.tensor(val).to_global( sbp=flow.sbp.broadcast, placement=flow.placement(\"cuda\", ranks=[0]) ) new_parameters[key] = val", "( value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3) ) return", "parameters = torch_dict new_parameters = dict() for key, value in", "may obtain a copy of the License at # #", "import oneflow as flow import torch logger = logging.getLogger(__name__) def", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "= logging.getLogger(__name__) def 
convert_qkv_weight(cfg, value): \"\"\" Convert qkv.weight to be", "load_torch_checkpoint(model, cfg, path=\"./mae_finetuned_vit_base.pth\", strict=False): \"\"\" Load checkpoint from the given", "flow import torch logger = logging.getLogger(__name__) def convert_qkv_weight(cfg, value): \"\"\"", "may not use this file except in compliance with the", "cfg.model.num_heads hidden_size = cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_bias", "key.replace(\"attn.proj\", \"self_attention.dense\") elif \"norm2\" in key: key = key.replace(\"norm2\", \"post_attention_layernorm\")", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "key.replace(\"mlp.fc1\", \"mlp.dense_h_to_4h\") elif \"mlp.fc2\" in key: key = key.replace(\"mlp.fc2\", \"mlp.dense_4h_to_h\")", "new_parameters[key] = val model.load_state_dict(new_parameters, strict=strict) print(\"Successfully load torch mae checkpoint.\")", "\"\"\" num_heads = cfg.model.num_heads hidden_size = cfg.model.embed_dim head_size = int(hidden_size", "this file except in compliance with the License. # You", "if \"num_batches_tracked\" not in key: # to global tensor key,", "elif \"mlp.fc1\" in key: key = key.replace(\"mlp.fc1\", \"mlp.dense_h_to_4h\") elif \"mlp.fc2\"", "num_heads, head_size, hidden_size]) .permute(1, 0, 2, 3) .contiguous() .view(hidden_size *", "int(hidden_size / num_heads) qkv_bias = ( value.view(3, num_heads, head_size).permute(1, 0,", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "to global tensor key, val = filter_keys(key, value, cfg) val", "Args: cfg: config file value: qkv.bias in the loaded checkpoint", "<reponame>Oneflow-Inc/libai # coding=utf-8 # Copyright 2021 The OneFlow Authors. 
All", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "\"\"\" Convert qkv.bias to be compatible with LiBai transformer layer", "num_heads = cfg.model.num_heads hidden_size = cfg.model.embed_dim head_size = int(hidden_size /", "key.replace(\"attn.qkv\", \"self_attention.query_key_value\") if \"weight\" in key: value = convert_qkv_weight(cfg, value)", "rights reserved. # # Licensed under the Apache License, Version", "convert_qkv_weight(cfg, value): \"\"\" Convert qkv.weight to be compatible with LiBai", "\"self_attention.query_key_value\") if \"weight\" in key: value = convert_qkv_weight(cfg, value) if", "be compatible with LiBai transformer layer Args: cfg: config file", "language governing permissions and # limitations under the License. import", "Filtering the state_dict keys and values to match LiBai's MAE", "import logging import oneflow as flow import torch logger =", "be downloaded from the original repo: https://github.com/facebookresearch/mae \"\"\" torch_dict =", "placement=flow.placement(\"cuda\", ranks=[0]) ) new_parameters[key] = val model.load_state_dict(new_parameters, strict=strict) print(\"Successfully load", "checkpoint \"\"\" num_heads = cfg.model.num_heads hidden_size = cfg.model.embed_dim head_size =", "in key: # to global tensor key, val = filter_keys(key,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "limitations under the License. 
import logging import oneflow as flow", "\"norm2\" in key: key = key.replace(\"norm2\", \"post_attention_layernorm\") elif \"mlp.fc1\" in", "= key.replace(\"norm2\", \"post_attention_layernorm\") elif \"mlp.fc1\" in key: key = key.replace(\"mlp.fc1\",", "the state_dict keys and values to match LiBai's MAE model", "elif \"fc_norm\" in key: key = key.replace(\"fc_norm\", \"norm\") return key,", "key = key.replace(\"mlp.fc1\", \"mlp.dense_h_to_4h\") elif \"mlp.fc2\" in key: key =", "global tensor key, val = filter_keys(key, value, cfg) val =", "or implied. # See the License for the specific language", "in key: key = key.replace(\"norm2\", \"post_attention_layernorm\") elif \"mlp.fc1\" in key:", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "filter_keys(key, value, cfg): \"\"\" Filtering the state_dict keys and values", "key = key.replace(\"attn.qkv\", \"self_attention.query_key_value\") if \"weight\" in key: value =", "elif \"mlp.fc2\" in key: key = key.replace(\"mlp.fc2\", \"mlp.dense_4h_to_h\") elif \"fc_norm\"", "qkv_bias = ( value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3)", "key.replace(\"norm2\", \"post_attention_layernorm\") elif \"mlp.fc1\" in key: key = key.replace(\"mlp.fc1\", \"mlp.dense_h_to_4h\")", "= convert_qkv_weight(cfg, value) if \"bias\" in key: value = convert_qkv_bias(cfg,", "strict=False): \"\"\" Load checkpoint from the given torch weights. Torch", "the License. import logging import oneflow as flow import torch", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "under the License. import logging import oneflow as flow import", "compatible with LiBai transformer layer Args: cfg: config file value:", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "Copyright 2021 The OneFlow Authors. All rights reserved. 
# #", "elif \"attn.qkv\" in key: key = key.replace(\"attn.qkv\", \"self_attention.query_key_value\") if \"weight\"", "Convert qkv.bias to be compatible with LiBai transformer layer Args:", "\"num_batches_tracked\" not in key: # to global tensor key, val", "License. import logging import oneflow as flow import torch logger", "\"\"\" Convert qkv.weight to be compatible with LiBai transformer layer", "not in key: # to global tensor key, val =", "# # Unless required by applicable law or agreed to", "\"fc_norm\" in key: key = key.replace(\"fc_norm\", \"norm\") return key, value", "( value.view([3, num_heads, head_size, hidden_size]) .permute(1, 0, 2, 3) .contiguous()", "torch_dict = torch.load(path, map_location=\"cpu\")[\"model\"] parameters = torch_dict new_parameters = dict()", "= val.detach().cpu().numpy() val = flow.tensor(val).to_global( sbp=flow.sbp.broadcast, placement=flow.placement(\"cuda\", ranks=[0]) ) new_parameters[key]", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "in key: key = key.replace(\"attn.qkv\", \"self_attention.query_key_value\") if \"weight\" in key:", "Version 2.0 (the \"License\"); # you may not use this", "coding=utf-8 # Copyright 2021 The OneFlow Authors. All rights reserved.", "state_dict keys and values to match LiBai's MAE model \"\"\"", "if \"weight\" in key: value = convert_qkv_weight(cfg, value) if \"bias\"", "config file value: qkv.bias in the loaded checkpoint \"\"\" num_heads", "if \"norm1\" in key: key = key.replace(\"norm1\", \"input_layernorm\") elif \"attn.qkv\"", "value, cfg) val = val.detach().cpu().numpy() val = flow.tensor(val).to_global( sbp=flow.sbp.broadcast, placement=flow.placement(\"cuda\",", "implied. 
# See the License for the specific language governing", "value): \"\"\" Convert qkv.bias to be compatible with LiBai transformer", "under the Apache License, Version 2.0 (the \"License\"); # you", "and values to match LiBai's MAE model \"\"\" if \"norm1\"", "path=\"./mae_finetuned_vit_base.pth\", strict=False): \"\"\" Load checkpoint from the given torch weights.", "LiBai transformer layer Args: cfg: config file value: qkv.bias in", "All rights reserved. # # Licensed under the Apache License,", "by applicable law or agreed to in writing, software #", "transformer layer Args: cfg: config file value: qkv.bias in the", "The OneFlow Authors. All rights reserved. # # Licensed under", "key, value def load_torch_checkpoint(model, cfg, path=\"./mae_finetuned_vit_base.pth\", strict=False): \"\"\" Load checkpoint", "= cfg.model.num_heads hidden_size = cfg.model.embed_dim head_size = int(hidden_size / num_heads)", "head_size = int(hidden_size / num_heads) qkv_weight = ( value.view([3, num_heads,", "= filter_keys(key, value, cfg) val = val.detach().cpu().numpy() val = flow.tensor(val).to_global(", "original repo: https://github.com/facebookresearch/mae \"\"\" torch_dict = torch.load(path, map_location=\"cpu\")[\"model\"] parameters =", "Authors. All rights reserved. 
# # Licensed under the Apache", "num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3) ) return qkv_bias def", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", ") return qkv_bias def filter_keys(key, value, cfg): \"\"\" Filtering the", "Unless required by applicable law or agreed to in writing,", "= key.replace(\"mlp.fc2\", \"mlp.dense_4h_to_h\") elif \"fc_norm\" in key: key = key.replace(\"fc_norm\",", "val model.load_state_dict(new_parameters, strict=strict) print(\"Successfully load torch mae checkpoint.\") return model", "\"self_attention.dense\") elif \"norm2\" in key: key = key.replace(\"norm2\", \"post_attention_layernorm\") elif", "key = key.replace(\"norm1\", \"input_layernorm\") elif \"attn.qkv\" in key: key =", "the original repo: https://github.com/facebookresearch/mae \"\"\" torch_dict = torch.load(path, map_location=\"cpu\")[\"model\"] parameters", "the specific language governing permissions and # limitations under the", "\"mlp.dense_h_to_4h\") elif \"mlp.fc2\" in key: key = key.replace(\"mlp.fc2\", \"mlp.dense_4h_to_h\") elif", "applicable law or agreed to in writing, software # distributed", "key: key = key.replace(\"norm2\", \"post_attention_layernorm\") elif \"mlp.fc1\" in key: key", "new_parameters = dict() for key, value in parameters.items(): if \"num_batches_tracked\"", "val = val.detach().cpu().numpy() val = flow.tensor(val).to_global( sbp=flow.sbp.broadcast, placement=flow.placement(\"cuda\", ranks=[0]) )", "to match LiBai's MAE model \"\"\" if \"norm1\" in key:", "qkv.weight to be compatible with LiBai transformer layer Args: cfg:", "as flow import torch logger = logging.getLogger(__name__) def convert_qkv_weight(cfg, value):", "num_heads) qkv_bias = ( value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size *", "sbp=flow.sbp.broadcast, placement=flow.placement(\"cuda\", ranks=[0]) ) new_parameters[key] = val model.load_state_dict(new_parameters, strict=strict) 
print(\"Successfully", "value def load_torch_checkpoint(model, cfg, path=\"./mae_finetuned_vit_base.pth\", strict=False): \"\"\" Load checkpoint from", "Args: cfg: config file value: qkv.weight in the loaded checkpoint", "in writing, software # distributed under the License is distributed", "value in parameters.items(): if \"num_batches_tracked\" not in key: # to", "parameters.items(): if \"num_batches_tracked\" not in key: # to global tensor", "map_location=\"cpu\")[\"model\"] parameters = torch_dict new_parameters = dict() for key, value", "def load_torch_checkpoint(model, cfg, path=\"./mae_finetuned_vit_base.pth\", strict=False): \"\"\" Load checkpoint from the", "from the given torch weights. Torch weight can be downloaded", "/ num_heads) qkv_bias = ( value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size", "key = key.replace(\"attn.proj\", \"self_attention.dense\") elif \"norm2\" in key: key =", "hidden_size = cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_bias =", "def filter_keys(key, value, cfg): \"\"\" Filtering the state_dict keys and", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "= cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_bias = (", "# You may obtain a copy of the License at", "cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_weight = ( value.view([3,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "flow.tensor(val).to_global( sbp=flow.sbp.broadcast, placement=flow.placement(\"cuda\", ranks=[0]) ) new_parameters[key] = val model.load_state_dict(new_parameters, strict=strict)", "\"norm1\" in key: key = key.replace(\"norm1\", \"input_layernorm\") elif \"attn.qkv\" in", "MAE model \"\"\" if \"norm1\" in key: key = key.replace(\"norm1\",", "repo: https://github.com/facebookresearch/mae \"\"\" torch_dict = torch.load(path, map_location=\"cpu\")[\"model\"] parameters = 
torch_dict", "= key.replace(\"attn.qkv\", \"self_attention.query_key_value\") if \"weight\" in key: value = convert_qkv_weight(cfg,", "the License for the specific language governing permissions and #", "value.view([3, num_heads, head_size, hidden_size]) .permute(1, 0, 2, 3) .contiguous() .view(hidden_size", "in key: key = key.replace(\"mlp.fc2\", \"mlp.dense_4h_to_h\") elif \"fc_norm\" in key:", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "\"weight\" in key: value = convert_qkv_weight(cfg, value) if \"bias\" in", ") new_parameters[key] = val model.load_state_dict(new_parameters, strict=strict) print(\"Successfully load torch mae", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "in key: key = key.replace(\"norm1\", \"input_layernorm\") elif \"attn.qkv\" in key:", "in parameters.items(): if \"num_batches_tracked\" not in key: # to global", "permissions and # limitations under the License. import logging import", "# Copyright 2021 The OneFlow Authors. All rights reserved. #", "layer Args: cfg: config file value: qkv.bias in the loaded", "https://github.com/facebookresearch/mae \"\"\" torch_dict = torch.load(path, map_location=\"cpu\")[\"model\"] parameters = torch_dict new_parameters", "3) ) return qkv_bias def filter_keys(key, value, cfg): \"\"\" Filtering", "key: value = convert_qkv_weight(cfg, value) if \"bias\" in key: value", "0, 2, 3) .contiguous() .view(hidden_size * 3, hidden_size) ) return", "key, value in parameters.items(): if \"num_batches_tracked\" not in key: #", "\"License\"); # you may not use this file except in", "LiBai transformer layer Args: cfg: config file value: qkv.weight in", "# limitations under the License. 
import logging import oneflow as", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "model \"\"\" if \"norm1\" in key: key = key.replace(\"norm1\", \"input_layernorm\")", "key: key = key.replace(\"norm1\", \"input_layernorm\") elif \"attn.qkv\" in key: key", "\"\"\" Load checkpoint from the given torch weights. Torch weight", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "\"mlp.fc2\" in key: key = key.replace(\"mlp.fc2\", \"mlp.dense_4h_to_h\") elif \"fc_norm\" in", "file value: qkv.bias in the loaded checkpoint \"\"\" num_heads =", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "head_size, hidden_size]) .permute(1, 0, 2, 3) .contiguous() .view(hidden_size * 3,", "return qkv_weight def convert_qkv_bias(cfg, value): \"\"\" Convert qkv.bias to be", "key, val = filter_keys(key, value, cfg) val = val.detach().cpu().numpy() val", "cfg: config file value: qkv.bias in the loaded checkpoint \"\"\"", "You may obtain a copy of the License at #", "key: key = key.replace(\"mlp.fc2\", \"mlp.dense_4h_to_h\") elif \"fc_norm\" in key: key", "= cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_weight = (", "the Apache License, Version 2.0 (the \"License\"); # you may" ]
[]
[ "and (self.img_n_lin <= data_size_lin) and (self.img_n_pix <= data_size_pxl): data =", "@abstractmethod def PROJECTION_TYPE(self): raise NotImplementedError() @property @abstractmethod def ALLOW_PROJECTION_TYPE(self): return", "prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list", "template class # ============================= class L1Interface(ABC): @property @abstractmethod def PROJECTION_TYPE(self):", "'NA' # Get attrs set unit_name = 'Unit' attrs =", "h5_file, product_id): super().__init__(h5_file, product_id) self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0]", "in prod_name: return self._get_land_water_flag() if 'Lt_' in prod_name: return self._get_Lt(prod_name)", "= dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_DN'][0]] =", "Public # ----------------------------- def get_product_data(self, prod_name:str): if 'Land_water_flag' in prod_name:", "is not data_name: data = data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0]", "'NA' return attrs[unit_name][0].decode('UTF-8') # ============================= # Level-1 map-projection class #", "data = data / cos_theta_0 return data def _get_stray_light_correction_flag(self, prod_name):", "return self._get_Lt(prod_name) if 'Rt_' in prod_name: return self._get_Rt(prod_name) if 'Stray_light_correction_flag_'", "data_name:str, **kwargs): raise NotImplementedError() @abstractmethod def get_geometry_data_list(self): raise NotImplementedError() def", "self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self, prod_name:str): dset", "def _get_Rt(self, prod_name): prod_name = prod_name.replace('Rt_', 'Lt_') dset = self.h5_file['Image_data/'", 
"dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN return data", "prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_' in", "# Private # ----------------------------- def _get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag'] data", "import bilin_2d from spot.config import PROJ_TYPE # ============================= # Level-1", "data[data == dset.attrs['Error_value'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value", "interval lon_mode = False if 'Longitude' == data_name: lon_mode =", "data[dn_data == dset.attrs['Error_DN']] = 0 return data > 0 class", "= PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name] def __init__(self, h5_file, product_id):", "= super().get_product_data_list() for prod in prod_list: if 'Lt_' in prod:", "* dset.attrs['Slope'][0] + dset.attrs['Offset'][0] # Finish if interval is none", "prod_list = sorted(prod_list) return prod_list class IRSL1B(L1B): def get_product_data_list(self): prod_list", "np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]]", "= self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data", "dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN cos_theta_0 =", "= sorted(prod_list) return prod_list class IRSL1B(L1B): def get_product_data_list(self): prod_list =", "from spot.utility import bilin_2d from spot.config import PROJ_TYPE # =============================", "============================= # Level-1 sub-processing level class # ============================= class L1B(Scene):", "== dset.attrs['Error_DN'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered", "prod_name: 
return 'NA' # Get attrs set unit_name = 'Unit'", "data_name: data = data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] # Finish", "abc import ABC, abstractmethod, abstractproperty from spot.utility import bilin_2d from", "data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN return data def _get_Rt(self, prod_name):", "_get_stray_light_correction_flag(self, prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset = self.h5_file['Image_data/' +", "= np.NaN cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data = data", "dset.attrs['Offset'][0] return data @abstractmethod def get_geometry_data(self, data_name:str, **kwargs): raise NotImplementedError()", "# ============================= class Scene(L1Interface): PROJECTION_TYPE = PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE,", "PROJECTION_TYPE = PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name] def __init__(self, h5_file,", "encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data <", "from spot.config import PROJ_TYPE # ============================= # Level-1 template class", "* dset.attrs['Slope'][0] + dset.attrs['Offset'][0] return data @abstractmethod def get_geometry_data(self, data_name:str,", "<= data_size_lin) and (self.img_n_pix <= data_size_pxl): data = data[:self.img_n_lin, :self.img_n_pix]", "class # ============================= class L1B(Scene): # ----------------------------- # Public #", "np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]] =", "----------------------------- def get_product_data(self, prod_name:str): if 'Land_water_flag' in prod_name: return self._get_land_water_flag()", "class # ============================= class L1Interface(ABC): @property @abstractmethod def PROJECTION_TYPE(self): raise", "none if 
interval is None or interval == 'none': return", "dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid", "value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data", "# Level-1 template class # ============================= class L1Interface(ABC): @property @abstractmethod", "img_data_grp_attrs['Grid_interval'][0] def get_geometry_data(self, data_name: str, **kwargs): interval = kwargs['interval'] dset", "= np.bitwise_and(dn_data, 0x8000) data[dn_data == dset.attrs['Error_DN']] = 0 return data", "spot.config import PROJ_TYPE # ============================= # Level-1 template class #", "if interval is None or interval == 'none': return data", "Scene(L1Interface): PROJECTION_TYPE = PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name] def __init__(self,", "< dset.attrs['Minimum_valid_DN'][0]] = np.NaN cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data", "= img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self, prod_name:str): dset =", "data def get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE #", "cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data = data / cos_theta_0", "dset = self.h5_file['Geometry_data/' + data_name] data = dset[:] if 'Latitude'", "dset[:] # Validate data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs:", "in dset.attrs: data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN # Convert DN", "if interval == 'auto': interp_interval = dset.attrs['Resampling_interval'][0] else: interp_interval =", "== dset.attrs['Error_DN']] = 0 return data > 0 class 
VNRL1B(L1B):", "decimal import Decimal, ROUND_HALF_UP from abc import ABC, abstractmethod, abstractproperty", "> dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN return", "(data_size_lin, data_size_pxl) = data.shape if (kwargs['fit_img_size'] is True) and (self.img_n_lin", "if the product is QA_flag or Line_tai93 if 'QA_flag' ==", "np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto',", "'Minimum_valid_DN' in dset.attrs: data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN # Convert", "= dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data", "============================= # Level-1 map-projection class # ============================= class Scene(L1Interface): PROJECTION_TYPE", "= self.h5_file['Image_data/' + prod_name] dn_data = dset[:] data = np.bitwise_and(dn_data,", "lon_mode = True if interp_interval > 1: data = bilin_2d(data,", "attrs: return 'NA' return attrs[unit_name][0].decode('UTF-8') # ============================= # Level-1 map-projection", "to physical value data = data * dset.attrs['Slope'][0] + dset.attrs['Offset'][0]", "if interval is none if interval is None or interval", "/ cos_theta_0 return data def _get_stray_light_correction_flag(self, prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_',", "lon_mode) # Trim away the excess pixel/line (data_size_lin, data_size_pxl) =", "return super().get_product_data(prod_name) # ----------------------------- # Private # ----------------------------- def _get_land_water_flag(self):", "= product_id geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin =", "return data # Interpolate raw data if interval == 'auto':", "data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_value'][0]] 
= np.NaN", "'Stray_light_correction_flag_' in prod_name: return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) # ----------------------------- #", "Get attrs set unit_name = 'Unit' attrs = self.h5_file['/Image_data/' +", "# Get unit if unit_name not in attrs: return 'NA'", "np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') if 'Maximum_valid_DN' in", "super().get_product_data_list() for prod in prod_list: if 'Lt_SW' in prod: prod_list.append(prod.replace('Lt',", "data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_value'][0]]", "pixel/line (data_size_lin, data_size_pxl) = data.shape if (kwargs['fit_img_size'] is True) and", "Validate data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data ==", "def _get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32) if 'Error_DN'", "class Scene(L1Interface): PROJECTION_TYPE = PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name] def", "= prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data =", "product_id): super().__init__(h5_file, product_id) self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs", "r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN", "**kwargs): raise NotImplementedError() @abstractmethod def get_geometry_data_list(self): raise NotImplementedError() def get_product_data_list(self):", "prod_name:str): dset = self.h5_file['Image_data/' + prod_name] # Return uint16 type", "prod_name: return self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in prod_name: return self._get_stray_light_correction_flag(prod_name) return", "in 
prod_name: return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) # ----------------------------- # Private", "'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask", "+ prod_name] # Return uint16 type data if the product", "prod_list class IRSL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod", "np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') if 'Maximum_valid_DN' in dset.attrs:", "+ prod_name] dn_data = dset[:] data = np.bitwise_and(dn_data, 0x8000) data[dn_data", "> dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_value'][0]] = np.NaN return", "= data * dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']] =", "and 'Longitude' is not data_name: data = data.astype(np.float32) * dset.attrs['Slope'][0]", "prod_name or 'Line_tai93' == prod_name: return dset[:] # Validate data", "class L1Interface(ABC): @property @abstractmethod def PROJECTION_TYPE(self): raise NotImplementedError() @property @abstractmethod", "ABC, abstractmethod, abstractproperty from spot.utility import bilin_2d from spot.config import", "IRSL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list:", "# Public # ----------------------------- def get_product_data(self, prod_name:str): if 'Land_water_flag' in", "the product is QA_flag or Line_tai93 if 'QA_flag' == prod_name", "return list(self.h5_file['/Image_data'].keys()) def get_unit(self, prod_name: str): if 'Rt_' in prod_name:", "get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_SW'", "not data_name: data = data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] #", "return data > 0 class VNRL1B(L1B): def get_product_data_list(self): prod_list =", "prod_name): prod_name = prod_name.replace('Rt_', 'Lt_') dset = 
self.h5_file['Image_data/' + prod_name]", "return attrs[unit_name][0].decode('UTF-8') # ============================= # Level-1 map-projection class # =============================", "+ dset.attrs['Offset'][0] return data @abstractmethod def get_geometry_data(self, data_name:str, **kwargs): raise", "Convert DN to physical value data = data * dset.attrs['Slope'][0]", "(self.img_n_lin <= data_size_lin) and (self.img_n_pix <= data_size_pxl): data = data[:self.img_n_lin,", "= np.NaN return data def _get_Lt(self, prod_name): dset = self.h5_file['Image_data/'", "'Error_DN' in dset.attrs: data[data == dset.attrs['Error_value'][0]] = np.NaN with np.warnings.catch_warnings():", "return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE # ============================= # Level-1", "__init__(self, h5_file, product_id): super().__init__(h5_file, product_id) self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number =", "in attrs: return 'NA' return attrs[unit_name][0].decode('UTF-8') # ============================= # Level-1", "with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') if 'Maximum_valid_DN'", "unit if unit_name not in attrs: return 'NA' return attrs[unit_name][0].decode('UTF-8')", "= np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith',", "interval == 'none': return data # Interpolate raw data if", "if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_value'][0]] = np.NaN with", "product_id) self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs", "def get_geometry_data(self, data_name:str, **kwargs): raise NotImplementedError() @abstractmethod def 
get_geometry_data_list(self): raise", "self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0] data", "abstractproperty from spot.utility import bilin_2d from spot.config import PROJ_TYPE #", "============================= class L1B(Scene): # ----------------------------- # Public # ----------------------------- def", "np.NaN data[data < dset.attrs['Minimum_valid_value'][0]] = np.NaN return data def _get_Lt(self,", "prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_SW' in", "bilin_2d from spot.config import PROJ_TYPE # ============================= # Level-1 template", "= 0 return data > 0 class VNRL1B(L1B): def get_product_data_list(self):", "product is QA_flag or Line_tai93 if 'QA_flag' == prod_name or", "'Maximum_valid_DN' in dset.attrs: data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN if 'Minimum_valid_DN'", "self.h5_file['Image_data/' + prod_name] # Return uint16 type data if the", "raise NotImplementedError() def get_product_data_list(self): return list(self.h5_file['/Image_data'].keys()) def get_unit(self, prod_name: str):", "np.NaN cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data = data /", "== 'auto': interp_interval = dset.attrs['Resampling_interval'][0] else: interp_interval = interval lon_mode", "Level-1 template class # ============================= class L1Interface(ABC): @property @abstractmethod def", "prod_list: if 'Lt_SW' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list", "== 'none': return data # Interpolate raw data if interval", "in dset.attrs: data[data == dset.attrs['Error_value'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore',", "data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value", "set unit_name = 'Unit' attrs = 
self.h5_file['/Image_data/' + prod_name].attrs #", "in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return", "0x8000) data[dn_data == dset.attrs['Error_DN']] = 0 return data > 0", "get_product_data_list(self): return list(self.h5_file['/Image_data'].keys()) def get_unit(self, prod_name: str): if 'Rt_' in", "for prod in prod_list: if 'Lt_SW' in prod: prod_list.append(prod.replace('Lt', 'Rt'))", "import Decimal, ROUND_HALF_UP from abc import ABC, abstractmethod, abstractproperty from", "data[data < dset.attrs['Minimum_valid_value'][0]] = np.NaN return data def _get_Lt(self, prod_name):", "super().get_product_data_list() for prod in prod_list: if 'Lt_' in prod: prod_list.append(prod.replace('Lt',", "# Interpolate raw data if interval == 'auto': interp_interval =", "'Rt_' in prod_name: return 'NA' # Get attrs set unit_name", "self.h5_file['Geometry_data'].attrs self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs", "= kwargs['interval'] dset = self.h5_file['Geometry_data/' + data_name] data = dset[:]", "Private # ----------------------------- def _get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag'] data =", "prod in prod_list: if 'Lt_SW' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt',", "uint16 type data if the product is QA_flag or Line_tai93", "is none if interval is None or interval == 'none':", "self.h5_file['Geometry_data/' + data_name] data = dset[:] if 'Latitude' is not", "h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0] def get_geometry_data(self, data_name:", "True if interp_interval > 1: data = bilin_2d(data, interp_interval, lon_mode)", "data # 
Interpolate raw data if interval == 'auto': interp_interval", "in dset.attrs: data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN if 'Minimum_valid_DN' in", "# Level-1 sub-processing level class # ============================= class L1B(Scene): #", "data = data * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] return data @abstractmethod", "(greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] =", "None or interval == 'none': return data # Interpolate raw", "dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings():", "self.product_id = product_id geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin", "= True if interp_interval > 1: data = bilin_2d(data, interp_interval,", "away the excess pixel/line (data_size_lin, data_size_pxl) = data.shape if (kwargs['fit_img_size']", "data = data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']]", "Decimal, ROUND_HALF_UP from abc import ABC, abstractmethod, abstractproperty from spot.utility", "sub-processing level class # ============================= class L1B(Scene): # ----------------------------- #", "0 class VNRL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod", "dn_data = dset[:] data = np.bitwise_and(dn_data, 0x8000) data[dn_data == dset.attrs['Error_DN']]", "<filename>spot/level1.py import numpy as np import logging from decimal import", "'QA_flag' == prod_name or 'Line_tai93' == prod_name: return dset[:] #", "or interval == 'none': return data # Interpolate raw data", "dset.attrs['Slope'][0] + dset.attrs['Offset'][0] # Finish if interval is none if", "data_name: lon_mode = True if interp_interval > 1: data =", "dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data < 
dset.attrs['Minimum_valid_value'][0]] = np.NaN return data", "Level-1 sub-processing level class # ============================= class L1B(Scene): # -----------------------------", "< dset.attrs['Minimum_valid_DN'][0]] = np.NaN return data def _get_Rt(self, prod_name): prod_name", "@property @abstractmethod def ALLOW_PROJECTION_TYPE(self): return NotImplementedError() def __init__(self, h5_file, product_id):", "NotImplementedError() def __init__(self, h5_file, product_id): self.h5_file = h5_file self.product_id =", "_get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32) if 'Error_DN' in", "= data.shape if (kwargs['fit_img_size'] is True) and (self.img_n_lin <= data_size_lin)", "= np.NaN # Convert DN to physical value data =", "NotImplementedError() def get_product_data_list(self): return list(self.h5_file['/Image_data'].keys()) def get_unit(self, prod_name: str): if", "'Rt_' in prod_name: return self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in prod_name: return", "prod_list: if 'Lt_' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list", "def get_unit(self, prod_name: str): if 'Rt_' in prod_name: return 'NA'", "self.ALLOW_PROJECTION_TYPE # ============================= # Level-1 sub-processing level class # =============================", "return data def _get_Rt(self, prod_name): prod_name = prod_name.replace('Rt_', 'Lt_') dset", "== dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered", "Return uint16 type data if the product is QA_flag or", "def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE # ============================= # Level-1 sub-processing level", "data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True)))", 
"'Lt_' in prod_name: return self._get_Lt(prod_name) if 'Rt_' in prod_name: return", "> 1: data = bilin_2d(data, interp_interval, lon_mode) # Trim away", "dset.attrs['Slope'][0] + dset.attrs['Offset'][0] return data @abstractmethod def get_geometry_data(self, data_name:str, **kwargs):", "= prod_name.replace('Rt_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data =", "= h5_file self.product_id = product_id geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs self.geo_n_pix =", "np.NaN # Convert DN to physical value data = data", "PROJECTION_TYPE(self): raise NotImplementedError() @property @abstractmethod def ALLOW_PROJECTION_TYPE(self): return NotImplementedError() def", "'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list class IRSL1B(L1B): def get_product_data_list(self):", "False if 'Longitude' == data_name: lon_mode = True if interp_interval", "def get_product_data(self, prod_name:str): if 'Land_water_flag' in prod_name: return self._get_land_water_flag() if", "if 'Rt_' in prod_name: return self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in prod_name:", "Interpolate raw data if interval == 'auto': interp_interval = dset.attrs['Resampling_interval'][0]", "'auto': interp_interval = dset.attrs['Resampling_interval'][0] else: interp_interval = interval lon_mode =", "if unit_name not in attrs: return 'NA' return attrs[unit_name][0].decode('UTF-8') #", "self._get_Lt(prod_name) if 'Rt_' in prod_name: return self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in", "VNRL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list:", "Finish if interval is none if interval is None or", "np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN return data def _get_Rt(self,", "= data * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] return data @abstractmethod def", "prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset = 
self.h5_file['Image_data/' + prod_name]", "lon_mode = False if 'Longitude' == data_name: lon_mode = True", "1: data = bilin_2d(data, interp_interval, lon_mode) # Trim away the", "prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data,", "return self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in prod_name: return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name)", "= self.h5_file['Image_data'].attrs self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self,", "= data / cos_theta_0 return data def _get_stray_light_correction_flag(self, prod_name): prod_name", "return 'NA' # Get attrs set unit_name = 'Unit' attrs", "img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0] def", "Line_tai93 if 'QA_flag' == prod_name or 'Line_tai93' == prod_name: return", "dset.attrs['Error_DN'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in", "# ============================= # Level-1 template class # ============================= class L1Interface(ABC):", "data = np.bitwise_and(dn_data, 0x8000) data[dn_data == dset.attrs['Error_DN']] = 0 return", "def get_product_data(self, prod_name:str): dset = self.h5_file['Image_data/' + prod_name] # Return", "self.h5_file['/Image_data/' + prod_name].attrs # Get unit if unit_name not in", "prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:]", "+ data_name] data = dset[:] if 'Latitude' is not data_name", "dset.attrs: data[data == dset.attrs['Error_value'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid", "self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0] def 
get_geometry_data(self, data_name: str, **kwargs): interval =", "dset[:] data = np.bitwise_and(dn_data, 0x8000) data[dn_data == dset.attrs['Error_DN']] = 0", "value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data", "L1Interface(ABC): @property @abstractmethod def PROJECTION_TYPE(self): raise NotImplementedError() @property @abstractmethod def", "(kwargs['fit_img_size'] is True) and (self.img_n_lin <= data_size_lin) and (self.img_n_pix <=", "dset.attrs['Minimum_valid_DN'][0]] = np.NaN return data def _get_Rt(self, prod_name): prod_name =", "in dset.attrs: data[data == dset.attrs['Error_DN'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore',", "get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_'", "L1B(Scene): # ----------------------------- # Public # ----------------------------- def get_product_data(self, prod_name:str):", "= self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0]", "data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] # Finish if interval is", "< dset.attrs['Minimum_valid_value'][0]] = np.NaN return data def _get_Lt(self, prod_name): dset", "dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid", "= self.h5_file['Image_data/' + prod_name] # Return uint16 type data if", "< dset.attrs['Minimum_valid_DN'][0]] = np.NaN # Convert DN to physical value", "QA_flag or Line_tai93 if 'QA_flag' == prod_name or 'Line_tai93' ==", "attrs[unit_name][0].decode('UTF-8') # ============================= # Level-1 map-projection class # ============================= class", "self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) # ----------------------------- # Private # ----------------------------- def", ":self.img_n_pix] return data def 
get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return", "product_id geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0]", "interval='auto', fit_img_size=True))) data = data / cos_theta_0 return data def", "type data if the product is QA_flag or Line_tai93 if", "data[:self.img_n_lin, :self.img_n_pix] return data def get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self):", "+ dset.attrs['Offset'][0] # Finish if interval is none if interval", "data = data[:self.img_n_lin, :self.img_n_pix] return data def get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys())", "'Latitude' is not data_name and 'Longitude' is not data_name: data", "import numpy as np import logging from decimal import Decimal,", "return data def get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE", "PROJ_TYPE # ============================= # Level-1 template class # ============================= class", "level class # ============================= class L1B(Scene): # ----------------------------- # Public", "bilin_2d(data, interp_interval, lon_mode) # Trim away the excess pixel/line (data_size_lin,", "= np.NaN data[data < dset.attrs['Minimum_valid_value'][0]] = np.NaN return data def", "# ============================= # Level-1 sub-processing level class # ============================= class", "== prod_name: return dset[:] # Validate data = dset[:].astype(np.float32) if", "encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data <", "data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_DN'][0]]", "return self._get_land_water_flag() if 'Lt_' in 
prod_name: return self._get_Lt(prod_name) if 'Rt_'", "0 return data > 0 class VNRL1B(L1B): def get_product_data_list(self): prod_list", "in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]]", "interval is none if interval is None or interval ==", "dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings():", "self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0] def", "prod_name: str): if 'Rt_' in prod_name: return 'NA' # Get", "============================= class Scene(L1Interface): PROJECTION_TYPE = PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name]", "DN to physical value data = data * dset.attrs['Slope'][0] +", "list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE # ============================= # Level-1 sub-processing", "return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) # ----------------------------- # Private # -----------------------------", "= self.h5_file['Image_data'].attrs self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0] def get_geometry_data(self, data_name: str, **kwargs):", "if 'Maximum_valid_DN' in dset.attrs: data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN if", "= dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data *", "# Finish if interval is none if interval is None", "not data_name and 'Longitude' is not data_name: data = data.astype(np.float32)", "data_name: str, **kwargs): interval = kwargs['interval'] dset = self.h5_file['Geometry_data/' +", "data * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] return data @abstractmethod def 
get_geometry_data(self,", "> dset.attrs['Maximum_valid_DN'][0]] = np.NaN if 'Minimum_valid_DN' in dset.attrs: data[data <", "'Error_DN' in dset.attrs: data[data == dset.attrs['Error_DN'][0]] = np.NaN with np.warnings.catch_warnings():", "if 'Land_water_flag' in prod_name: return self._get_land_water_flag() if 'Lt_' in prod_name:", "(greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_value'][0]] =", "dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope_reflectance']", "class IRSL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in", "**kwargs): interval = kwargs['interval'] dset = self.h5_file['Geometry_data/' + data_name] data", "return prod_list class IRSL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for", "import PROJ_TYPE # ============================= # Level-1 template class # =============================", "get_geometry_data(self, data_name:str, **kwargs): raise NotImplementedError() @abstractmethod def get_geometry_data_list(self): raise NotImplementedError()", "data_size_pxl) = data.shape if (kwargs['fit_img_size'] is True) and (self.img_n_lin <=", "if 'Lt_' in prod_name: return self._get_Lt(prod_name) if 'Rt_' in prod_name:", "str): if 'Rt_' in prod_name: return 'NA' # Get attrs", "= self.h5_file['Geometry_data'].attrs self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs =", "dn_data = dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32)", "# Level-1 map-projection class # ============================= class Scene(L1Interface): PROJECTION_TYPE =", "super().get_product_data(prod_name) # ----------------------------- # Private # ----------------------------- def _get_land_water_flag(self): dset", 
"list(self.h5_file['/Image_data'].keys()) def get_unit(self, prod_name: str): if 'Rt_' in prod_name: return", "prod in prod_list: if 'Lt_' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt',", "= h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0] def get_geometry_data(self,", "data_name and 'Longitude' is not data_name: data = data.astype(np.float32) *", "is not data_name and 'Longitude' is not data_name: data =", "if (kwargs['fit_img_size'] is True) and (self.img_n_lin <= data_size_lin) and (self.img_n_pix", "interp_interval = interval lon_mode = False if 'Longitude' == data_name:", "physical value data = data * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] return", "__init__(self, h5_file, product_id): self.h5_file = h5_file self.product_id = product_id geo_data_grp_attrs", "value encountered in (greater|less)') if 'Maximum_valid_DN' in dset.attrs: data[data >", "= img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self, prod_name:str): dset = self.h5_file['Image_data/' + prod_name]", "'Lt_SW' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list)", "is QA_flag or Line_tai93 if 'QA_flag' == prod_name or 'Line_tai93'", "# ----------------------------- def _get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32)", "h5_file, product_id): self.h5_file = h5_file self.product_id = product_id geo_data_grp_attrs =", "np.NaN return data def _get_Rt(self, prod_name): prod_name = prod_name.replace('Rt_', 'Lt_')", "if interp_interval > 1: data = bilin_2d(data, interp_interval, lon_mode) #", "+ prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0] data =", "interp_interval = dset.attrs['Resampling_interval'][0] else: interp_interval = interval lon_mode = 
False", "= 'Unit' attrs = self.h5_file['/Image_data/' + prod_name].attrs # Get unit", "import logging from decimal import Decimal, ROUND_HALF_UP from abc import", "= img_data_grp_attrs['Grid_interval'][0] def get_geometry_data(self, data_name: str, **kwargs): interval = kwargs['interval']", "= dset.attrs['Resampling_interval'][0] else: interp_interval = interval lon_mode = False if", "in prod_name: return self._get_Lt(prod_name) if 'Rt_' in prod_name: return self._get_Rt(prod_name)", "is True) and (self.img_n_lin <= data_size_lin) and (self.img_n_pix <= data_size_pxl):", "prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list class IRSL1B(L1B): def", "is None or interval == 'none': return data # Interpolate", "prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data", "data_name] data = dset[:] if 'Latitude' is not data_name and", "mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data", "interval = kwargs['interval'] dset = self.h5_file['Geometry_data/' + data_name] data =", "r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN", "geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs", "in prod_name: return 'NA' # Get attrs set unit_name =", "get_geometry_data(self, data_name: str, **kwargs): interval = kwargs['interval'] dset = self.h5_file['Geometry_data/'", "dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope']", "for prod in prod_list: if 'Lt_' in prod: prod_list.append(prod.replace('Lt', 'Rt'))", "raw data if interval == 'auto': interp_interval = dset.attrs['Resampling_interval'][0] else:", "@abstractmethod def get_geometry_data(self, data_name:str, 
**kwargs): raise NotImplementedError() @abstractmethod def get_geometry_data_list(self):", "data if interval == 'auto': interp_interval = dset.attrs['Resampling_interval'][0] else: interp_interval", "def PROJECTION_TYPE(self): raise NotImplementedError() @property @abstractmethod def ALLOW_PROJECTION_TYPE(self): return NotImplementedError()", "dset.attrs: data[data == dset.attrs['Error_DN'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid", "in prod_name: return self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in prod_name: return self._get_stray_light_correction_flag(prod_name)", "# ============================= class L1Interface(ABC): @property @abstractmethod def PROJECTION_TYPE(self): raise NotImplementedError()", "kwargs['interval'] dset = self.h5_file['Geometry_data/' + data_name] data = dset[:] if", "dset.attrs['Minimum_valid_value'][0]] = np.NaN return data def _get_Lt(self, prod_name): dset =", "raise NotImplementedError() @abstractmethod def get_geometry_data_list(self): raise NotImplementedError() def get_product_data_list(self): return", "dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask =", "in (greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_value'][0]]", "np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data = data / cos_theta_0 return data", "dset[:] if 'Latitude' is not data_name and 'Longitude' is not", "<= data_size_pxl): data = data[:self.img_n_lin, :self.img_n_pix] return data def get_geometry_data_list(self):", "== dset.attrs['Error_value'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered", "# Get attrs set unit_name = 'Unit' attrs = self.h5_file['/Image_data/'", "get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE # ============================= # Level-1 sub-processing level 
class", "'Longitude' == data_name: lon_mode = True if interp_interval > 1:", "np.NaN if 'Minimum_valid_DN' in dset.attrs: data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN", "if 'Latitude' is not data_name and 'Longitude' is not data_name:", "data * dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']] = np.NaN", "= np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)')", "'Longitude' is not data_name: data = data.astype(np.float32) * dset.attrs['Slope'][0] +", "get_unit(self, prod_name: str): if 'Rt_' in prod_name: return 'NA' #", "* dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with", "np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data", "sorted(prod_list) return prod_list class IRSL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list()", "return NotImplementedError() def __init__(self, h5_file, product_id): self.h5_file = h5_file self.product_id", "dset.attrs['Maximum_valid_DN'][0]] = np.NaN if 'Minimum_valid_DN' in dset.attrs: data[data < dset.attrs['Minimum_valid_DN'][0]]", "if 'QA_flag' == prod_name or 'Line_tai93' == prod_name: return dset[:]", "data def _get_Rt(self, prod_name): prod_name = prod_name.replace('Rt_', 'Lt_') dset =", "np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data", "not in attrs: return 'NA' return attrs[unit_name][0].decode('UTF-8') # ============================= #", "prod_name: return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) # ----------------------------- # Private #", "= np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN return data def", "dset.attrs['Offset'][0] # Finish if interval is none if interval is", "data = dset[:] if 
'Latitude' is not data_name and 'Longitude'", "data.shape if (kwargs['fit_img_size'] is True) and (self.img_n_lin <= data_size_lin) and", "prod_name.replace('Rt_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:]", "return data def _get_stray_light_correction_flag(self, prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset", "self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_n_pix", "numpy as np import logging from decimal import Decimal, ROUND_HALF_UP", "if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_DN'][0]] = np.NaN with", "if 'Minimum_valid_DN' in dset.attrs: data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN #", "= np.NaN if 'Minimum_valid_DN' in dset.attrs: data[data < dset.attrs['Minimum_valid_DN'][0]] =", "attrs set unit_name = 'Unit' attrs = self.h5_file['/Image_data/' + prod_name].attrs", "str, **kwargs): interval = kwargs['interval'] dset = self.h5_file['Geometry_data/' + data_name]", "prod_name): dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask", "r'invalid value encountered in (greater|less)') if 'Maximum_valid_DN' in dset.attrs: data[data", "> dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN cos_theta_0", "product_id): self.h5_file = h5_file self.product_id = product_id geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs", "data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN # Convert DN to physical", "= interval lon_mode = False if 'Longitude' == data_name: lon_mode", "dset = self.h5_file['Image_data/' + prod_name] # Return uint16 type data", "logging from decimal import Decimal, ROUND_HALF_UP from abc import ABC,", "interval == 'auto': interp_interval = dset.attrs['Resampling_interval'][0] else: interp_interval = interval", "# ============================= class 
L1B(Scene): # ----------------------------- # Public # -----------------------------", "+ dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore',", "(greater|less)') if 'Maximum_valid_DN' in dset.attrs: data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN", "class VNRL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in", "= data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] # Finish if interval", "or Line_tai93 if 'QA_flag' == prod_name or 'Line_tai93' == prod_name:", "interval is None or interval == 'none': return data #", "self.h5_file['Image_data'].attrs self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0] def get_geometry_data(self, data_name: str, **kwargs): interval", "============================= # Level-1 template class # ============================= class L1Interface(ABC): @property", "data @abstractmethod def get_geometry_data(self, data_name:str, **kwargs): raise NotImplementedError() @abstractmethod def", "= np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope'] + dset.attrs['Offset']", "map-projection class # ============================= class Scene(L1Interface): PROJECTION_TYPE = PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE", "from decimal import Decimal, ROUND_HALF_UP from abc import ABC, abstractmethod,", "'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list class IRSL1B(L1B):", "# Convert DN to physical value data = data *", "def get_geometry_data(self, data_name: str, **kwargs): interval = kwargs['interval'] dset =", "np.NaN return data def _get_Lt(self, prod_name): dset = self.h5_file['Image_data/' +", "ALLOW_PROJECTION_TYPE(self): return NotImplementedError() def __init__(self, h5_file, product_id): self.h5_file = h5_file", "value data = data * dset.attrs['Slope'][0] + 
dset.attrs['Offset'][0] return data", "data = data * dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']]", "============================= class L1Interface(ABC): @property @abstractmethod def PROJECTION_TYPE(self): raise NotImplementedError() @property", "dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in", "= dset[:] if 'Latitude' is not data_name and 'Longitude' is", "spot.utility import bilin_2d from spot.config import PROJ_TYPE # ============================= #", "= dset[:] data = np.bitwise_and(dn_data, 0x8000) data[dn_data == dset.attrs['Error_DN']] =", "in (greater|less)') if 'Maximum_valid_DN' in dset.attrs: data[data > dset.attrs['Maximum_valid_DN'][0]] =", "self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_spatial_reso", "prod_name: return self._get_Lt(prod_name) if 'Rt_' in prod_name: return self._get_Rt(prod_name) if", "= data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']] =", "= bilin_2d(data, interp_interval, lon_mode) # Trim away the excess pixel/line", "data[data == dset.attrs['Error_DN'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value", "super().__init__(h5_file, product_id) self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs =", "def _get_Lt(self, prod_name): dset = self.h5_file['Image_data/' + prod_name] dn_data =", "data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']] = np.NaN", "----------------------------- # Private # ----------------------------- def 
_get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag']", "= self.h5_file['Geometry_data/' + data_name] data = dset[:] if 'Latitude' is", "fit_img_size=True))) data = data / cos_theta_0 return data def _get_stray_light_correction_flag(self,", "prod_name].attrs # Get unit if unit_name not in attrs: return", "img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self, prod_name:str): dset = self.h5_file['Image_data/' + prod_name] #", "or 'Line_tai93' == prod_name: return dset[:] # Validate data =", "else: interp_interval = interval lon_mode = False if 'Longitude' ==", "encountered in (greater|less)') if 'Maximum_valid_DN' in dset.attrs: data[data > dset.attrs['Maximum_valid_DN'][0]]", "prod_name] # Return uint16 type data if the product is", "+ prod_name].attrs # Get unit if unit_name not in attrs:", "get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE # ============================= #", "data if the product is QA_flag or Line_tai93 if 'QA_flag'", "data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN if 'Minimum_valid_DN' in dset.attrs: data[data", "geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0]", "== data_name: lon_mode = True if interp_interval > 1: data", "if 'Rt_' in prod_name: return 'NA' # Get attrs set", "----------------------------- # Public # ----------------------------- def get_product_data(self, prod_name:str): if 'Land_water_flag'", "# ============================= # Level-1 map-projection class # ============================= class Scene(L1Interface):", "np import logging from decimal import Decimal, ROUND_HALF_UP from abc", "abstractmethod, abstractproperty from spot.utility import bilin_2d from spot.config import PROJ_TYPE", "dset.attrs: data[data < 
dset.attrs['Minimum_valid_DN'][0]] = np.NaN # Convert DN to", "cos_theta_0 return data def _get_stray_light_correction_flag(self, prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_')", "h5_file self.product_id = product_id geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0]", "self.h5_file['Image_data/' + prod_name] dn_data = dset[:] data = np.bitwise_and(dn_data, 0x8000)", "NotImplementedError() @property @abstractmethod def ALLOW_PROJECTION_TYPE(self): return NotImplementedError() def __init__(self, h5_file,", "get_product_data(self, prod_name:str): dset = self.h5_file['Image_data/' + prod_name] # Return uint16", "prod_name: return dset[:] # Validate data = dset[:].astype(np.float32) if 'Error_DN'", "data_size_pxl): data = data[:self.img_n_lin, :self.img_n_pix] return data def get_geometry_data_list(self): return", "self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data ==", "== prod_name or 'Line_tai93' == prod_name: return dset[:] # Validate", "if 'Lt_' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list =", "get_geometry_data_list(self): raise NotImplementedError() def get_product_data_list(self): return list(self.h5_file['/Image_data'].keys()) def get_unit(self, prod_name:", "> 0 class VNRL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for", "dset.attrs['Resampling_interval'][0] else: interp_interval = interval lon_mode = False if 'Longitude'", "in prod_list: if 'Lt_' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag'))", "Get unit if unit_name not in attrs: return 'NA' return", "img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0] def get_geometry_data(self, 
data_name: str,", "Trim away the excess pixel/line (data_size_lin, data_size_pxl) = data.shape if", "'Unit' attrs = self.h5_file['/Image_data/' + prod_name].attrs # Get unit if", "'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list # EOF", "def ALLOW_PROJECTION_TYPE(self): return NotImplementedError() def __init__(self, h5_file, product_id): self.h5_file =", "= np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data = data / cos_theta_0 return", "data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope'] +", "return dset[:] # Validate data = dset[:].astype(np.float32) if 'Error_DN' in", "@property @abstractmethod def PROJECTION_TYPE(self): raise NotImplementedError() @property @abstractmethod def ALLOW_PROJECTION_TYPE(self):", "dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data =", "geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0]", "dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] data =", "as np import logging from decimal import Decimal, ROUND_HALF_UP from", "img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self, prod_name:str): dset = self.h5_file['Image_data/'", "np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') if", "True) and (self.img_n_lin <= data_size_lin) and (self.img_n_pix <= data_size_pxl): data", "_get_Lt(self, prod_name): dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:]", "mask).astype(np.float32) data = data * dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data ==", "dset.attrs['Minimum_valid_DN'][0]] = np.NaN 
cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data =", "prod_name = prod_name.replace('Rt_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data", "self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self, prod_name:str): dset = self.h5_file['Image_data/' +", "# Return uint16 type data if the product is QA_flag", "prod_name] dn_data = dset[:] data = np.bitwise_and(dn_data, 0x8000) data[dn_data ==", "self.h5_file['Image_data'].attrs self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self, prod_name:str):", "= np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance']", "return 'NA' return attrs[unit_name][0].decode('UTF-8') # ============================= # Level-1 map-projection class", "import ABC, abstractmethod, abstractproperty from spot.utility import bilin_2d from spot.config", "# Trim away the excess pixel/line (data_size_lin, data_size_pxl) = data.shape", "def get_geometry_data_list(self): raise NotImplementedError() def get_product_data_list(self): return list(self.h5_file['/Image_data'].keys()) def get_unit(self,", "if 'Stray_light_correction_flag_' in prod_name: return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) # -----------------------------", "dset.attrs['Error_DN']] = 0 return data > 0 class VNRL1B(L1B): def", "def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list: if", "h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0]", "# ----------------------------- def get_product_data(self, prod_name:str): if 
'Land_water_flag' in prod_name: return", "ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name] def __init__(self, h5_file, product_id): super().__init__(h5_file, product_id)", "class # ============================= class Scene(L1Interface): PROJECTION_TYPE = PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE =", "dset.attrs['Minimum_valid_DN'][0]] = np.NaN # Convert DN to physical value data", "if 'Lt_SW' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list =", "attrs = self.h5_file['/Image_data/' + prod_name].attrs # Get unit if unit_name", "'Land_water_flag' in prod_name: return self._get_land_water_flag() if 'Lt_' in prod_name: return", "def __init__(self, h5_file, product_id): self.h5_file = h5_file self.product_id = product_id", "_get_Rt(self, prod_name): prod_name = prod_name.replace('Rt_', 'Lt_') dset = self.h5_file['Image_data/' +", "the excess pixel/line (data_size_lin, data_size_pxl) = data.shape if (kwargs['fit_img_size'] is", "self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin", "* dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with", "self.h5_file = h5_file self.product_id = product_id geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs self.geo_n_pix", "= np.NaN return data def _get_Rt(self, prod_name): prod_name = prod_name.replace('Rt_',", "----------------------------- def _get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32) if", "= data[:self.img_n_lin, :self.img_n_pix] return data def get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def", "= geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs = 
self.h5_file['Image_data'].attrs self.img_n_pix =", "in prod_list: if 'Lt_SW' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag'))", "= [PROJECTION_TYPE, PROJ_TYPE.EQR.name] def __init__(self, h5_file, product_id): super().__init__(h5_file, product_id) self.scene_number", "= self.h5_file['/Image_data/' + prod_name].attrs # Get unit if unit_name not", "excess pixel/line (data_size_lin, data_size_pxl) = data.shape if (kwargs['fit_img_size'] is True)", "data / cos_theta_0 return data def _get_stray_light_correction_flag(self, prod_name): prod_name =", "prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list class", "return data @abstractmethod def get_geometry_data(self, data_name:str, **kwargs): raise NotImplementedError() @abstractmethod", "@abstractmethod def ALLOW_PROJECTION_TYPE(self): return NotImplementedError() def __init__(self, h5_file, product_id): self.h5_file", "def _get_stray_light_correction_flag(self, prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset = self.h5_file['Image_data/'", "ROUND_HALF_UP from abc import ABC, abstractmethod, abstractproperty from spot.utility import", "def get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE # =============================", "np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] =", "np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]]", "with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data >", "class L1B(Scene): # ----------------------------- # Public # 
----------------------------- def get_product_data(self,", "= dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_value'][0]] =", "from abc import ABC, abstractmethod, abstractproperty from spot.utility import bilin_2d", "@abstractmethod def get_geometry_data_list(self): raise NotImplementedError() def get_product_data_list(self): return list(self.h5_file['/Image_data'].keys()) def", "= False if 'Longitude' == data_name: lon_mode = True if", "if 'Longitude' == data_name: lon_mode = True if interp_interval >", "and (self.img_n_pix <= data_size_pxl): data = data[:self.img_n_lin, :self.img_n_pix] return data", "unit_name not in attrs: return 'NA' return attrs[unit_name][0].decode('UTF-8') # =============================", "(self.img_n_pix <= data_size_pxl): data = data[:self.img_n_lin, :self.img_n_pix] return data def", "dset.attrs['Error_value'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in", "interp_interval, lon_mode) # Trim away the excess pixel/line (data_size_lin, data_size_pxl)", "mask).astype(np.float32) data = data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data ==", "'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] data", "Level-1 map-projection class # ============================= class Scene(L1Interface): PROJECTION_TYPE = PROJ_TYPE.SCENE.name", "= super().get_product_data_list() for prod in prod_list: if 'Lt_SW' in prod:", "return data def _get_Lt(self, prod_name): dset = self.h5_file['Image_data/' + prod_name]", "+ dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore',", "dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_DN'][0]] = np.NaN", "# ----------------------------- # Public # ----------------------------- def get_product_data(self, prod_name:str): if", "unit_name = 
'Unit' attrs = self.h5_file['/Image_data/' + prod_name].attrs # Get", "data def _get_Lt(self, prod_name): dset = self.h5_file['Image_data/' + prod_name] dn_data", "data > 0 class VNRL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list()", "dset.attrs: data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN if 'Minimum_valid_DN' in dset.attrs:", "= h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_spatial_reso =", "np.bitwise_and(dn_data, 0x8000) data[dn_data == dset.attrs['Error_DN']] = 0 return data >", "prod_name:str): if 'Land_water_flag' in prod_name: return self._get_land_water_flag() if 'Lt_' in", "data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope_reflectance'] +", "interp_interval > 1: data = bilin_2d(data, interp_interval, lon_mode) # Trim", "def __init__(self, h5_file, product_id): super().__init__(h5_file, product_id) self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number", "def get_product_data_list(self): return list(self.h5_file['/Image_data'].keys()) def get_unit(self, prod_name: str): if 'Rt_'", "PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name] def __init__(self, h5_file, product_id): super().__init__(h5_file,", "[PROJECTION_TYPE, PROJ_TYPE.EQR.name] def __init__(self, h5_file, product_id): super().__init__(h5_file, product_id) self.scene_number =", "get_product_data(self, prod_name:str): if 'Land_water_flag' in prod_name: return self._get_land_water_flag() if 'Lt_'", "'Line_tai93' == prod_name: return dset[:] # Validate data = dset[:].astype(np.float32)", "self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in prod_name: return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) #", "NotImplementedError() @abstractmethod 
def get_geometry_data_list(self): raise NotImplementedError() def get_product_data_list(self): return list(self.h5_file['/Image_data'].keys())", "dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_value'][0]] = np.NaN", "data def _get_stray_light_correction_flag(self, prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset =", "return self.ALLOW_PROJECTION_TYPE # ============================= # Level-1 sub-processing level class #", "= geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin =", "PROJ_TYPE.EQR.name] def __init__(self, h5_file, product_id): super().__init__(h5_file, product_id) self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0]", "'none': return data # Interpolate raw data if interval ==", "dset = self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs:", "data = data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] # Finish if", "data_size_lin) and (self.img_n_pix <= data_size_pxl): data = data[:self.img_n_lin, :self.img_n_pix] return", "# Validate data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data", "prod_name: return self._get_land_water_flag() if 'Lt_' in prod_name: return self._get_Lt(prod_name) if", "data = bilin_2d(data, interp_interval, lon_mode) # Trim away the excess", "raise NotImplementedError() @property @abstractmethod def ALLOW_PROJECTION_TYPE(self): return NotImplementedError() def __init__(self,", "# ----------------------------- # Private # ----------------------------- def _get_land_water_flag(self): dset =", "'Lt_' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list)", "prod_list.append(prod.replace('Lt', 'Rt')) 
prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list #", "np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data", "data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN", "self._get_land_water_flag() if 'Lt_' in prod_name: return self._get_Lt(prod_name) if 'Rt_' in" ]
[ "string def convertToTitle(self, n: int) -> str: capitals = [chr(x)", "class Solution: # @return a string def convertToTitle(self, n: int)", "ord('Z')+1)] result = [] while n > 0: result.insert(0, capitals[(n-1)%len(capitals)])", "capitals[(n-1)%len(capitals)]) n = (n-1) % len(capitals) # result.reverse() return ''.join(result)", "n > 0: result.insert(0, capitals[(n-1)%len(capitals)]) n = (n-1) % len(capitals)", "-> str: capitals = [chr(x) for x in range(ord('A'), ord('Z')+1)]", "str: capitals = [chr(x) for x in range(ord('A'), ord('Z')+1)] result", "0: result.insert(0, capitals[(n-1)%len(capitals)]) n = (n-1) % len(capitals) # result.reverse()", "[chr(x) for x in range(ord('A'), ord('Z')+1)] result = [] while", "[] while n > 0: result.insert(0, capitals[(n-1)%len(capitals)]) n = (n-1)", "> 0: result.insert(0, capitals[(n-1)%len(capitals)]) n = (n-1) % len(capitals) #", "while n > 0: result.insert(0, capitals[(n-1)%len(capitals)]) n = (n-1) %", "for x in range(ord('A'), ord('Z')+1)] result = [] while n", "Solution: # @return a string def convertToTitle(self, n: int) ->", "result.insert(0, capitals[(n-1)%len(capitals)]) n = (n-1) % len(capitals) # result.reverse() return", "capitals = [chr(x) for x in range(ord('A'), ord('Z')+1)] result =", "a string def convertToTitle(self, n: int) -> str: capitals =", "= [chr(x) for x in range(ord('A'), ord('Z')+1)] result = []", "@return a string def convertToTitle(self, n: int) -> str: capitals", "range(ord('A'), ord('Z')+1)] result = [] while n > 0: result.insert(0,", "x in range(ord('A'), ord('Z')+1)] result = [] while n >", "in range(ord('A'), ord('Z')+1)] result = [] while n > 0:", "def convertToTitle(self, n: int) -> str: capitals = [chr(x) for", "# @return a string def convertToTitle(self, n: int) -> str:", "result = [] while n > 0: result.insert(0, capitals[(n-1)%len(capitals)]) n", "int) -> str: capitals = [chr(x) for x in range(ord('A'),", "n: int) -> str: capitals = [chr(x) for x in", "convertToTitle(self, n: int) -> 
str: capitals = [chr(x) for x", "= [] while n > 0: result.insert(0, capitals[(n-1)%len(capitals)]) n =" ]
[ "the variable. \"\"\" def shrink(value): parts = (x and SingleQuote(x)", "queue.Empty: yield None finally: try: if process.returncode is None: #", "return '\"' + s.replace('\"', '\\\\\"') + '\"' def ShrinkToSnippet(cmd_parts, var_name,", "indicating whether to check the exit status of the process", "(exit code, output). Raises: TimeoutError on timeout. \"\"\" _ValidateAndLogCommand(args, cwd,", "preexec_fn = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL) if six.PY2: return subprocess.Popen( args=args,", "some output there # will be multiple encodings (e.g. adb", "def StartCmd(args, cwd=None, shell=False, env=None): \"\"\"Starts a subprocess and returns", "read (and thus yield) at once. poll_interval: The length of", "== 'win32' else _IterProcessStdoutFcntl) \"\"\"Iterate over a process's stdout. This", "x in value.split(var_value)) with_substitutions = ('\"$%s\"' % var_name).join(parts) return with_substitutions", "'\"\"' elif all(c in _SafeShellChars for c in s): return", "and in some output there # will be multiple encodings", "read. Yields: The output of the subprocess, line by line.", "status. \"\"\" cmd = _ValidateAndLogCommand(args, cwd, shell) process = Popen(", "'third_party', 'six') if SIX_PATH not in sys.path: sys.path.append(SIX_PATH) import six", "shell=True') else: if shell: raise Exception('array args must be run", "iter_timeout: An optional length of time, in seconds, to wait", "to check the exit status of the process after all", "'win32': close_fds = (stdin is None and stdout is None", "= select.select([child_fd], [], [], iter_aware_poll_interval) if child_fd in read_fds: data", "import sys import time CATAPULT_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..',", "seconds. 
cwd: If not None, the subprocess's current directory will", "sys.path.append(SIX_PATH) import six from devil import base_error logger = logging.getLogger(__name__)", "s: return '\"\"' elif all(c in _SafeShellChars for c in", "length of time in the iteration may take precedence. Raises:", "None, ( 'Iteration received no data despite no iter_timeout being", "Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None): pipe = Popen( args,", "and the process does not complete. Yields: basestrings of data", "threading.Thread(target=read_process_stdout) reader_thread.start() end_time = (time.time() + timeout) if timeout else", "the subprocess, line by line. Raises: CalledProcessError if check_status is", "for c in args) if cwd is None: cwd =", "cwd=None, shell=False, logfile=None, env=None): \"\"\"Executes a subprocess with a timeout.", "string using single quotes. Reliably quote a string which may", "shrink(value): parts = (x and SingleQuote(x) for x in value.split(var_value))", "single quotes. Reliably quote a string which may contain unsafe", "env=None, merge_stderr=False): \"\"\"Executes a subprocess and returns its exit code", "raw data. buffer_output += data has_incomplete_line = buffer_output[-1] not in", "in calls to `select.select`. If iter_timeout is set, the remaining", "def SingleQuote(s): \"\"\"Return an shell-escaped version of the string using", "with shell=True') else: if shell: raise Exception('array args must be", "that caller will not crash due to # uncaught exception.", "cwd=None, shell=False, env=None, check_status=True): \"\"\"Executes a subprocess and continuously yields", "import string import subprocess import sys import time CATAPULT_ROOT_PATH =", "(cwd or '')) return Call(args, cwd=cwd) def GetCmdOutput(args, cwd=None, shell=False,", "into account all quoting that needs to happen. 
Args: cmd_parts:", "__init__(self, output=None): super(TimeoutError, self).__init__('Timeout') self._output = output @property def output(self):", "for _IterProcessStdoutFcntl on all platforms. \"\"\" # pylint: disable=unused-argument if", "cwd=cwd, env=env) pipe.communicate() return pipe.wait() def RunCmd(args, cwd=None): \"\"\"Opens a", "is None and stderr is None) preexec_fn = None else:", "stderr=subprocess.STDOUT, env=env) try: for data in _IterProcessStdout(process, timeout=timeout): if logfile:", "logfile: logfile.write(data) output.write(data) except TimeoutError: raise TimeoutError(output.getvalue()) str_output = output.getvalue()", "if process.poll() is not None: # If process is closed,", "command using a variable to shrink it. Takes into account", "out. cur_iter_timeout = iter_end - time.time() if data is None", "fl | os.O_NONBLOCK) end_time = (time.time() + timeout) if timeout", "of time in the iteration may take precedence. Raises: TimeoutError:", "a string which may contain unsafe characters (e.g. space or", "not complete. Yields: basestrings of data or None. \"\"\" def", "0: yield None iter_end = time.time() + iter_timeout continue else:", "defines environment variables for the subprocess. Returns: Captures and returns", "logger.debug('[host]%s> %s', cwd, args) return args def GetCmdStatusAndOutput(args, cwd=None, shell=False,", "iter_timeout: iter_end = time.time() + iter_timeout cur_iter_timeout = iter_timeout for", "iter_timeout else None while True: if end_time and time.time() >", "is a suitable replacement for _IterProcessStdoutFcntl on all platforms. \"\"\"", "environment variables for the subprocess. Returns: A process handle from", "that defines environment variables for the subprocess. merge_stderr: If True,", "using single quotes. 
\"\"\" return pipes.quote(s) def DoubleQuote(s): \"\"\"Return an", "raise TimeoutError() try: s = stdout_queue.get(True, iter_timeout) if not s:", "subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn", "may contain unsafe characters (e.g. space or quote characters), while", "True, captures stderr as part of stdout. Returns: The 2-tuple", "DoubleQuote(s): \"\"\"Return an shell-escaped version of the string using double", "+ iter_timeout) if iter_timeout else None while True: if end_time", "else: iter_aware_poll_interval = poll_interval read_fds, _, _ = select.select([child_fd], [],", "output, and errors. Args: args: A string or a sequence", "+ iter_timeout if buffer_output: yield buffer_output if check_status and process.returncode:", "pass process.wait() def _IterProcessStdoutQueue(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"A Queue.Queue-based", "check_status: A boolean indicating whether to check the exit status", "cwd=cwd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) try: for data in _IterProcessStdout(process,", "mismatch error. # Ignore any decoding error, so that caller", "uncaught exception. Decoding errors are unavoidable, as we # do", "output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size) except IOError: break stdout_queue.put(output_chunk, True) if", "subprocess and returns a handle to the process. Args: args:", "is intentionally not public. Args: process: The process in question.", "raise TimeoutError() if iter_end_time and time.time() > iter_end_time: yield None", "variables for the subprocess. Returns: A process handle from subprocess.Popen.", "process.returncode is None: # Make sure the process doesn't stick", "c in s): return s else: return '\"' + s.replace('\"',", "data = _read_and_decode(child_fd, buffer_size) if not data: break yield data", "if args is a sequence. 
env: If not None, a", "process.wait() def _IterProcessStdoutQueue(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"A Queue.Queue-based implementation", "SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six') if SIX_PATH not in sys.path:", "cwd, shell) return Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=cwd, env=env)", "var_value): \"\"\"Constructs a shell snippet for a command using a", "stderr). \"\"\" _ValidateAndLogCommand(args, cwd, shell) stderr = subprocess.STDOUT if merge_stderr", "the program to execute is the first element. timeout: the", "= (time.time() + iter_timeout) if iter_timeout else None while True:", "that can be # found in the LICENSE file. \"\"\"A", "close_fds=close_fds, env=env, preexec_fn=preexec_fn ) else: # opens stdout in text", "output. Args: args: List of arguments to the program, the", "may depend on the shell implementation. This set usually includes:", "_IterProcessStdout(process, timeout=timeout): if logfile: logfile.write(data) output.write(data) except TimeoutError: raise TimeoutError(output.getvalue())", "of the process after all output has been read. Yields:", "check_status=True): buffer_output = '' iter_end = None cur_iter_timeout = None", "GetCmdStatusAndOutput(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes a subprocess and returns", "command's stderr to logger (which defaults to stdout). \"\"\" (_,", "data continue break break finally: try: if process.returncode is None:", "= ' '.join(SingleQuote(str(c)) for c in args) if cwd is", "get 'str', # and there will be no type mismatch", "over a process's stdout. This is intentionally not public. Args:", "try: # Enable non-blocking reads from the child's stdout. 
child_fd", "merge_stderr=False): \"\"\"Executes a subprocess and returns its exit code, output,", "continue else: assert data is not None, ( 'Iteration received", "Returns: A shell snippet that does not include setting the", "list of command arguments. var_name: The variable that holds var_value.", "unsafe characters (e.g. space, quote, or other special characters such", "True: if end_time and time.time() > end_time: raise TimeoutError() try:", "lines: yield line if iter_timeout: iter_end = time.time() + iter_timeout", "executed. shell: Whether to execute args as a shell command.", "the encoding of the output, and in some output there", "to wait in between each iteration. If no output is", "shell command. env: If not None, a mapping that defines", "= Popen( args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env) pipe.communicate() return", "to logger (which defaults to stdout). \"\"\" (_, output) =", "stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn ) else: # opens stdout", "+ (cwd or '')) return Call(args, cwd=cwd) def GetCmdOutput(args, cwd=None,", "single quotes. \"\"\" return pipes.quote(s) def DoubleQuote(s): \"\"\"Return an shell-escaped", "stdin=None, stdout=None, stderr=None, shell=None, cwd=None, env=None): # preexec_fn isn't supported", "stderr=stderr, shell=shell, cwd=cwd, env=env) pipe.communicate() return pipe.wait() def RunCmd(args, cwd=None):", "iter_end_time and time.time() > iter_end_time: yield None iter_end_time = time.time()", "4096 else '') return process.returncode, str_output def IterCmdOutputLines(args, iter_timeout=None, timeout=None,", "poll_interval: The length of time to wait in calls to", "if data is None or cur_iter_timeout < 0: yield None", "to # uncaught exception. 
Decoding errors are unavoidable, as we", "run with shell=False') args = ' '.join(SingleQuote(str(c)) for c in", "queue import threading stdout_queue = queue.Queue() def read_process_stdout(): # TODO(jbudorick):", "if sys.platform == 'win32': close_fds = (stdin is None and", "no iter_timeout being set. ' 'cmd: %s' % cmd) #", "process: The process in question. iter_timeout: An optional length of", "set. ' 'cmd: %s' % cmd) # Construct lines to", "data def _IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"An fcntl-based implementation", "if data and six.PY3: data = data.decode('utf-8', errors='ignore') return data", "must be run with shell=True') else: if shell: raise Exception('array", "shell. The set of characters that retain their special meaning", "'') return process.returncode, str_output def IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None, shell=False,", "decoding error, so that caller will not crash due to", "2-tuple (exit code, stdout). \"\"\" status, stdout, stderr = GetCmdStatusOutputAndError(", "time.time() if data is None or cur_iter_timeout < 0: yield", "command's stdout. Prints the command's stderr to logger (which defaults", "An optional length of time, in seconds, during which the", "The string quoted using single quotes. \"\"\" return pipes.quote(s) def", "special meaning may depend on the shell implementation. This set", "pass process.wait() reader_thread.join() _IterProcessStdout = (_IterProcessStdoutQueue if sys.platform == 'win32'", "on timeout. \"\"\" _ValidateAndLogCommand(args, cwd, shell) output = six.StringIO() process", "poll_interval=1): \"\"\"An fcntl-based implementation of _IterProcessStdout.\"\"\" # pylint: disable=too-many-nested-blocks import", "exception. 
process.kill() except OSError: pass process.wait() reader_thread.join() _IterProcessStdout = (_IterProcessStdoutQueue", "cwd, shell, env) return output def _ValidateAndLogCommand(args, cwd, shell): if", "cwd: If not None, the subprocess's current directory will be", "shell=shell, env=env, merge_stderr=merge_stderr) if stderr: logger.critical('STDERR: %s', stderr) logger.debug('STDOUT: %s%s',", "env=None): \"\"\"Starts a subprocess and returns a handle to the", "on all platforms. \"\"\" # pylint: disable=unused-argument if six.PY3: import", "the given time, this generator will yield None. timeout: An", "cwd=None, env=None): # preexec_fn isn't supported on windows. # pylint:", "buffer_size) if not data: break yield data if process.poll() is", "close_fds=close_fds, env=env, preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore' ) def Call(args, stdout=None,", "crash due to # uncaught exception. Decoding errors are unavoidable,", "True and the process exited with a non-zero exit status.", "if data: yield data continue break break finally: try: if", "\"\"\" def shrink(value): parts = (x and SingleQuote(x) for x", "= poll_interval read_fds, _, _ = select.select([child_fd], [], [], iter_aware_poll_interval)", "the output, and in some output there # will be", "_IterProcessStdoutFcntl on all platforms. \"\"\" # pylint: disable=unused-argument if six.PY3:", "in seconds or None to wait forever. cwd: If not", "assert data is not None, ( 'Iteration received no data", "this is a suitable replacement for _IterProcessStdoutFcntl on all platforms.", "return value. Args: args: A string or a sequence of", "that holds var_value. var_value: The string to replace in cmd_parts", "any decoding error, so that caller will not crash due", "to |cwd| before it's executed. shell: Whether to execute args", "args is a sequence. env: If not None, a mapping", "> end_time: raise TimeoutError() try: s = stdout_queue.get(True, iter_timeout) if", "all output has been read. 
Yields: The output of the", "= (x and SingleQuote(x) for x in value.split(var_value)) with_substitutions =", "GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False, logfile=None, env=None): \"\"\"Executes a subprocess with", "six.PY3: data = data.decode('utf-8', errors='ignore') return data def _IterProcessStdoutFcntl(process, iter_timeout=None,", "= time.time() + iter_timeout cur_iter_timeout = iter_timeout for data in", "else: close_fds = True preexec_fn = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL) if", "_IterProcessStdoutQueue(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"A Queue.Queue-based implementation of _IterProcessStdout.", "this source code is governed by a BSD-style license that", "Evaluate whether this is a suitable replacement for _IterProcessStdoutFcntl on", "status, stdout, stderr = GetCmdStatusOutputAndError( args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr)", "iter_timeout: Timeout for each iteration, in seconds. timeout: Timeout for", "is closed, keep checking for output data (because of timing", "with_substitutions or \"''\" return ' '.join(shrink(part) for part in cmd_parts)", "child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size) if data: yield", "StartCmd(args, cwd=None, shell=False, env=None): \"\"\"Starts a subprocess and returns a", "Make sure the process doesn't stick around if we fail", "merge_stderr else subprocess.PIPE pipe = Popen( args, stdout=subprocess.PIPE, stderr=stderr, shell=shell,", "= pipe.communicate() return (pipe.returncode, stdout, stderr) class TimeoutError(base_error.BaseError): \"\"\"Module-specific timeout", "iter_timeout=None, timeout=None, cwd=None, shell=False, env=None, check_status=True): \"\"\"Executes a subprocess and", "directory will be changed to |cwd| before it's executed. 
shell:", "def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None): pipe = Popen(", "output @property def output(self): return self._output def _read_and_decode(fd, buffer_size): data", "the first item in the args sequence. cwd: If not", "optional length of time, in seconds, to wait in between", "Pick an appropriate read size here. while True: try: output_chunk", "this generator will yield None. timeout: An optional length of", "\"\"\" (_, output) = GetCmdStatusAndOutput(args, cwd, shell, env) return output", "their special meaning may depend on the shell implementation. This", "else '') return (status, stdout) def StartCmd(args, cwd=None, shell=False, env=None):", "<gh_stars>1000+ # Copyright (c) 2012 The Chromium Authors. All rights", "else None while True: if end_time and time.time() > end_time:", "a subprocess to execute a program and returns its output.", "process handle from subprocess.Popen. \"\"\" _ValidateAndLogCommand(args, cwd, shell) return Popen(", "will receive output from the command as it is running.", "yield None iter_end_time = time.time() + iter_timeout if iter_end_time: iter_aware_poll_interval", "while True: read_fds, _, _ = select.select([child_fd], [], [], iter_aware_poll_interval)", "def GetCmdOutput(args, cwd=None, shell=False, env=None): \"\"\"Open a subprocess to execute", "before it's executed. shell: Whether to execute args as a", "is None: # Make sure the process doesn't stick around", "with shell=False') args = ' '.join(SingleQuote(str(c)) for c in args)", "each iteration, in seconds. timeout: Timeout for the entire command,", "universal_newlines=True, encoding='utf-8', errors='ignore' ) def Call(args, stdout=None, stderr=None, shell=None, cwd=None,", "length of time to wait in calls to `select.select`. If", "error. # Ignore any decoding error, so that caller will", "# Copyright (c) 2012 The Chromium Authors. All rights reserved.", "to read (and thus yield) at once. poll_interval: The length", "as '$'). 
The returned value can be used in a", "is received in the given time, this generator will yield", "try: output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size) except IOError: break stdout_queue.put(output_chunk, True)", "of this source code is governed by a BSD-style license", "fl = fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK) end_time", "= data.decode('utf-8', errors='ignore') return data def _IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None, buffer_size=4096,", "else: assert data is not None, ( 'Iteration received no", "not None, the subprocess's current directory will be changed to", "timeout=timeout): if iter_timeout: # Check whether the current iteration has", "with a non-zero exit status. \"\"\" cmd = _ValidateAndLogCommand(args, cwd,", "s: break yield s except queue.Empty: yield None finally: try:", "return pipe.wait() def RunCmd(args, cwd=None): \"\"\"Opens a subprocess to execute", "exited with a non-zero exit status. \"\"\" cmd = _ValidateAndLogCommand(args,", "Args: args: List of arguments to the program, the program", "reserved. # Use of this source code is governed by", "errors='ignore') return data def _IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"An", "stdout in text mode, so that caller side always get", "Args: s: The string to quote. Return: The string quoted", "if merge_stderr else subprocess.PIPE pipe = Popen( args, stdout=subprocess.PIPE, stderr=stderr,", "shell: Whether to execute args as a shell command. Must", "being set. ' 'cmd: %s' % cmd) # Construct lines", "reader_thread.join() _IterProcessStdout = (_IterProcessStdoutQueue if sys.platform == 'win32' else _IterProcessStdoutFcntl)", "some shell features such as variable interpolation. The returned value", "text mode, so that caller side always get 'str', #", "the timeout in seconds or None to wait forever. cwd:", "subprocess. 
Returns: The 2-tuple (exit code, output). Raises: TimeoutError on", "finally: try: if process.returncode is None: # Make sure the", "+ iter_timeout cur_iter_timeout = iter_timeout for data in _IterProcessStdout( process,", "as queue import threading stdout_queue = queue.Queue() def read_process_stdout(): #", "retain their special meaning may depend on the shell implementation.", "'' else: cwd = ':' + cwd logger.debug('[host]%s> %s', cwd,", "not crash due to # uncaught exception. Decoding errors are", "all platforms. \"\"\" # pylint: disable=unused-argument if six.PY3: import queue", "iter_end_time: iter_aware_poll_interval = min(poll_interval, max(0, iter_end_time - time.time())) else: iter_aware_poll_interval", "been read. Yields: The output of the subprocess, line by", "None: # Make sure the process doesn't stick around if", "returns its exit code, output, and errors. Args: args: A", "string and False if args is a sequence. logfile: Optional", "to happen. Args: cmd_parts: A list of command arguments. var_name:", "stderr) class TimeoutError(base_error.BaseError): \"\"\"Module-specific timeout exception.\"\"\" def __init__(self, output=None): super(TimeoutError,", "iter_timeout: # Check whether the current iteration has timed out.", "'`', '\\', '!', '*', and '@'. Args: s: The string", "# exception. process.kill() except OSError: pass process.wait() reader_thread.join() _IterProcessStdout =", "'') return (status, stdout) def StartCmd(args, cwd=None, shell=False, env=None): \"\"\"Starts", "iter_end = time.time() + iter_timeout continue else: assert data is", "= iter_end - time.time() if data is None or cur_iter_timeout", "it is running. env: If not None, a mapping that", "The returned value can be used in a shell command", "exit code, output, and errors. Args: args: A string or", "fcntl try: # Enable non-blocking reads from the child's stdout.", "quotes. \"\"\" return pipes.quote(s) def DoubleQuote(s): \"\"\"Return an shell-escaped version", "None to wait forever. 
cwd: If not None, the subprocess's", "environment variables for the subprocess. Returns: The 2-tuple (exit code,", "\"\"\" _ValidateAndLogCommand(args, cwd, shell) output = six.StringIO() process = Popen(", "special characters such as '$'). The returned value can be", "defaults to stdout). \"\"\" (_, output) = GetCmdStatusAndOutput(args, cwd, shell,", "element. iter_timeout: Timeout for each iteration, in seconds. timeout: Timeout", "if not s: return '\"\"' elif all(c in _SafeShellChars for", "The output of the subprocess, line by line. Raises: CalledProcessError", "unavoidable, as we # do not know the encoding of", "cwd = ':' + cwd logger.debug('[host]%s> %s', cwd, args) return", "if sys.platform == 'win32' else _IterProcessStdoutFcntl) \"\"\"Iterate over a process's", "An optional length of time, in seconds, to wait in", "else subprocess.PIPE pipe = Popen( args, stdout=subprocess.PIPE, stderr=stderr, shell=shell, cwd=cwd,", "number of bytes to read (and thus yield) at once.", "stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn ) else: #", "that will receive output from the command as it is", "that caller side always get 'str', # and there will", "as a shell command. env: If not None, a mapping", "defines environment variables for the subprocess. Returns: A process handle", "The length of time to wait in calls to `select.select`.", "encodings (e.g. adb logcat) return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout,", "string quoted using double quotes. \"\"\" if not s: return", "part of stdout. Returns: The 2-tuple (exit code, stdout). \"\"\"", "to quote. Return: The string quoted using double quotes. 
\"\"\"", "def _IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"An fcntl-based implementation of", "implementation of _IterProcessStdout.\"\"\" # pylint: disable=too-many-nested-blocks import fcntl try: #", "def shrink(value): parts = (x and SingleQuote(x) for x in", "= time.time() + iter_timeout continue else: assert data is not", "import fcntl try: # Enable non-blocking reads from the child's", "has_incomplete_line else '' for line in lines: yield line if", "None, the subprocess's current directory will be changed to |cwd|", "length of time, in seconds, during which the process must", "it's executed. Returns: Return code from the command execution. \"\"\"", "will yield None. timeout: An optional length of time, in", "environment variables for the subprocess. merge_stderr: If True, captures stderr", "else: # opens stdout in text mode, so that caller", "and returns its output. Args: args: A string or a", "import codecs import logging import os import pipes import select", "output of the subprocess, line by line. Raises: CalledProcessError if", "None iter_end = time.time() + iter_timeout continue else: assert data", "six from devil import base_error logger = logging.getLogger(__name__) _SafeShellChars =", "Raises: TimeoutError: if timeout is set and the process does", "codecs import logging import os import pipes import select import", "return (pipe.returncode, stdout, stderr) class TimeoutError(base_error.BaseError): \"\"\"Module-specific timeout exception.\"\"\" def", "exit code and output. Args: args: A string or a", "Yields: The output of the subprocess, line by line. Raises:", "The maximum number of bytes to read (and thus yield)", "of data or None. \"\"\" def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False,", "Exception('array args must be run with shell=False') args = '", "mapping that defines environment variables for the subprocess. 
check_status: A", "logfile.write(data) output.write(data) except TimeoutError: raise TimeoutError(output.getvalue()) str_output = output.getvalue() logger.debug('STDOUT+STDERR:", "a subprocess and returns its exit code, output, and errors.", "is set and the process does not complete. Yields: basestrings", "_ValidateAndLogCommand(args, cwd, shell) return Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=cwd,", "element. timeout: the timeout in seconds or None to wait", "mapping that defines environment variables for the subprocess. Returns: The", "return '\"\"' elif all(c in _SafeShellChars for c in s):", "= min(poll_interval, max(0, iter_end_time - time.time())) else: iter_aware_poll_interval = poll_interval", "a sequence. env: If not None, a mapping that defines", "will be multiple encodings (e.g. adb logcat) return subprocess.Popen( args=args,", "the shell implementation. This set usually includes: '$', '`', '\\',", "queue else: import Queue as queue import threading stdout_queue =", "program, the program to execute is the first element. timeout:", "to be interpreted literally. Args: s: The string to quote.", "item in the args sequence. cwd: If not None, the", "of the output, and in some output there # will", "else: import Queue as queue import threading stdout_queue = queue.Queue()", "which may contain unsafe characters (e.g. space, quote, or other", "except TimeoutError: raise TimeoutError(output.getvalue()) str_output = output.getvalue() logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(),", "stderr as part of stdout. Returns: The 3-tuple (exit code,", "Timeout for each iteration, in seconds. timeout: Timeout for the", "Return value doesn't matter. 
if six.PY2: codecs.lookup('string-escape') def SingleQuote(s): \"\"\"Return", "data: break yield data if process.poll() is not None: #", "import base_error logger = logging.getLogger(__name__) _SafeShellChars = frozenset(string.ascii_letters + string.digits", "if shell: raise Exception('array args must be run with shell=False')", "an appropriate read size here. while True: try: output_chunk =", "data = data.decode('utf-8', errors='ignore') return data def _IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None,", "> 4096 else '') return process.returncode, str_output def IterCmdOutputLines(args, iter_timeout=None,", "# and there will be no type mismatch error. #", "iter_aware_poll_interval = poll_interval read_fds, _, _ = select.select([child_fd], [], [],", "= (time.time() + timeout) if timeout else None try: while", "token that gets to be further interpreted by the shell.", "end_time: raise TimeoutError() if iter_end_time and time.time() > iter_end_time: yield", "shell-escaped version of the string using single quotes. Reliably quote", "'')) return Call(args, cwd=cwd) def GetCmdOutput(args, cwd=None, shell=False, env=None): \"\"\"Open", "for x in value.split(var_value)) with_substitutions = ('\"$%s\"' % var_name).join(parts) return", "subprocess. check_status: A boolean indicating whether to check the exit", "try: s = stdout_queue.get(True, iter_timeout) if not s: break yield", "variable. \"\"\" def shrink(value): parts = (x and SingleQuote(x) for", "due to # uncaught exception. Decoding errors are unavoidable, as", "from subprocess.Popen. \"\"\" _ValidateAndLogCommand(args, cwd, shell) return Popen( args, stdout=subprocess.PIPE,", "else: if shell: raise Exception('array args must be run with", "read_fds: data = _read_and_decode(child_fd, buffer_size) if data: yield data continue", "Raises: TimeoutError on timeout. \"\"\" _ValidateAndLogCommand(args, cwd, shell) output =", "for the subprocess. 
merge_stderr: If True, captures stderr as part", "signal.SIG_DFL) if six.PY2: return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr,", "output def _ValidateAndLogCommand(args, cwd, shell): if isinstance(args, six.string_types): if not", "# pylint: disable=unexpected-keyword-arg if sys.platform == 'win32': close_fds = (stdin", "# Enable non-blocking reads from the child's stdout. child_fd =", "List of arguments to the program, the program to execute", "data is not None, ( 'Iteration received no data despite", "intentionally not public. Args: process: The process in question. iter_timeout:", "of characters that retain their special meaning may depend on", "iter_end = time.time() + iter_timeout if buffer_output: yield buffer_output if", "buffer_output = '' iter_end = None cur_iter_timeout = None if", "value doesn't matter. if six.PY2: codecs.lookup('string-escape') def SingleQuote(s): \"\"\"Return an", "cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8',", "env: If not None, a mapping that defines environment variables", "to |cwd| before it's executed. Returns: Return code from the", "of time to wait in calls to `select.select`. If iter_timeout", "command, in seconds. cwd: If not None, the subprocess's current", "timeout is set and the process does not complete. Yields:", "# Ignore any decoding error, so that caller will not", "def RunCmd(args, cwd=None): \"\"\"Opens a subprocess to execute a program", "lines.pop() if has_incomplete_line else '' for line in lines: yield", "and False if args is a sequence. env: If not", "close_fds = True preexec_fn = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL) if six.PY2:", "implementation of _IterProcessStdout. 
TODO(jbudorick): Evaluate whether this is a suitable", "\"\"\" cmd = _ValidateAndLogCommand(args, cwd, shell) process = Popen( args,", "if iter_timeout: # Check whether the current iteration has timed", "check_status=check_status) def _IterCmdOutputLines(process, cmd, iter_timeout=None, timeout=None, check_status=True): buffer_output = ''", "env=None): pipe = Popen( args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env)", "and '@'. Args: s: The string to quote. Return: The", "program to execute is the first element. iter_timeout: Timeout for", "interpreted literally. Args: s: The string to quote. Return: The", "(time.time() + iter_timeout) if iter_timeout else None while True: if", "env=env, preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore' ) def Call(args, stdout=None, stderr=None,", "\"\"\" def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False, logfile=None, env=None): \"\"\"Executes a", "yield None finally: try: if process.returncode is None: # Make", "after all output has been read. Yields: The output of", "buffer_size) except IOError: break stdout_queue.put(output_chunk, True) if not output_chunk and", "return pipes.quote(s) def DoubleQuote(s): \"\"\"Return an shell-escaped version of the", "stderr as part of stdout. Returns: The 2-tuple (exit code,", "the LICENSE file. \"\"\"A wrapper for subprocess to make calling", "if args is a sequence. logfile: Optional file-like object that", "base_error logger = logging.getLogger(__name__) _SafeShellChars = frozenset(string.ascii_letters + string.digits +", "so, a TimeoutError will be raised. 
buffer_size: The maximum number", "' + (cwd or '')) return Call(args, cwd=cwd) def GetCmdOutput(args,", "stdout_queue.put(output_chunk, True) if not output_chunk and process.poll() is not None:", "= '' else: cwd = ':' + cwd logger.debug('[host]%s> %s',", "+ '\"' def ShrinkToSnippet(cmd_parts, var_name, var_value): \"\"\"Constructs a shell snippet", "'\\\\\"') + '\"' def ShrinkToSnippet(cmd_parts, var_name, var_value): \"\"\"Constructs a shell", "a shell command. env: If not None, a mapping that", "errors='ignore' ) def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None): pipe", "command as it is running. env: If not None, a", "data and six.PY3: data = data.decode('utf-8', errors='ignore') return data def", "\"\"\"Return an shell-escaped version of the string using single quotes.", "time.time() + iter_timeout if iter_end_time: iter_aware_poll_interval = min(poll_interval, max(0, iter_end_time", "stdout_queue = queue.Queue() def read_process_stdout(): # TODO(jbudorick): Pick an appropriate", "or a sequence of program arguments. The program to execute", "timeout=timeout, check_status=check_status) def _IterCmdOutputLines(process, cmd, iter_timeout=None, timeout=None, check_status=True): buffer_output =", "= _read_and_decode(process.stdout.fileno(), buffer_size) except IOError: break stdout_queue.put(output_chunk, True) if not", "shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) try: for data in _IterProcessStdout(process, timeout=timeout):", "shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn ) else: # opens stdout in", "code, output, and errors. Args: args: A string or a", "timeout: Timeout for the entire command, in seconds. cwd: If", "shell snippet that does not include setting the variable. \"\"\"", "a process's stdout. This is intentionally not public. Args: process:", "process exited with a non-zero exit status. \"\"\" cmd =", "it # later. Return value doesn't matter. 
if six.PY2: codecs.lookup('string-escape')", "to execute is the first element. timeout: the timeout in", "sequence. env: If not None, a mapping that defines environment", "c in args) if cwd is None: cwd = ''", "cwd=cwd, env=env) def GetCmdStatusOutputAndError(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes a", "iteration. If no output is received in the given time,", "fail with an # exception. process.kill() except OSError: pass process.wait()", "\"\"\" _ValidateAndLogCommand(args, cwd, shell) stderr = subprocess.STDOUT if merge_stderr else", "commands easier.\"\"\" import codecs import logging import os import pipes", "end_time and time.time() > end_time: raise TimeoutError() try: s =", "is True and the process exited with a non-zero exit", "the program to execute is the first element. iter_timeout: Timeout", "time.time() + iter_timeout continue else: assert data is not None,", "defines environment variables for the subprocess. merge_stderr: If True, captures", "[], [], iter_aware_poll_interval) if child_fd in read_fds: data = _read_and_decode(child_fd,", "lines to yield from raw data. buffer_output += data has_incomplete_line", "buffer_output += data has_incomplete_line = buffer_output[-1] not in '\\r\\n' lines", "take precedence. Raises: TimeoutError: if timeout is set and the", "exception. Decoding errors are unavoidable, as we # do not", "else '' for line in lines: yield line if iter_timeout:", "is governed by a BSD-style license that can be #", "\"\"\"Return an shell-escaped version of the string using double quotes.", "be no type mismatch error. # Ignore any decoding error,", "set, the remaining length of time in the iteration may", "precedence. 
Raises: TimeoutError: if timeout is set and the process", "iter_end - time.time() if data is None or cur_iter_timeout <", "output.write(data) except TimeoutError: raise TimeoutError(output.getvalue()) str_output = output.getvalue() logger.debug('STDOUT+STDERR: %s%s',", "raise Exception('string args must be run with shell=True') else: if", "caller side always get 'str', # and there will be", "variable that holds var_value. var_value: The string to replace in", "command line as one token that gets to be further", "'$', '`', '\\', '!', '*', and '@'. Args: s: The", "_ValidateAndLogCommand(args, cwd, shell) process = Popen( args, cwd=cwd, shell=shell, env=env,", "args sequence. cwd: If not None, the subprocess's current directory", "Returns: The 2-tuple (exit code, output). Raises: TimeoutError on timeout.", "iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"An fcntl-based implementation of _IterProcessStdout.\"\"\" #", "the program, the program to execute is the first element.", "side always get 'str', # and there will be no", "subprocess and returns its exit code and output. Args: args:", "for output data (because of timing # issues). while True:", "if not shell: raise Exception('string args must be run with", "mapping that defines environment variables for the subprocess. Returns: Captures", "if child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size) if data:", "Popen( args, stdout=subprocess.PIPE, stderr=stderr, shell=shell, cwd=cwd, env=env) stdout, stderr =", "yield data continue break break finally: try: if process.returncode is", "= time.time() + iter_timeout if buffer_output: yield buffer_output if check_status", "(time.time() + timeout) if timeout else None iter_end_time = (time.time()", "logfile=None, env=None): \"\"\"Executes a subprocess with a timeout. Args: args:", "+ cwd logger.debug('[host]%s> %s', cwd, args) return args def GetCmdStatusAndOutput(args,", "be changed to |cwd| before it's executed. 
Returns: Return code", "do so, a TimeoutError will be raised. buffer_size: The maximum", "Use of this source code is governed by a BSD-style", "in the args sequence. cwd: If not None, the subprocess's", "disable=too-many-nested-blocks import fcntl try: # Enable non-blocking reads from the", "end_time = (time.time() + timeout) if timeout else None try:", "can find it # later. Return value doesn't matter. if", "A list of command arguments. var_name: The variable that holds", "the current iteration has timed out. cur_iter_timeout = iter_end -", "from the child's stdout. child_fd = process.stdout.fileno() fl = fcntl.fcntl(child_fd,", "var_name, var_value): \"\"\"Constructs a shell snippet for a command using", "not None: break reader_thread = threading.Thread(target=read_process_stdout) reader_thread.start() end_time = (time.time()", "stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return _IterCmdOutputLines( process, cmd, iter_timeout=iter_timeout, timeout=timeout, check_status=check_status) def", "arguments. The program to execute is the string or the", "break stdout_queue.put(output_chunk, True) if not output_chunk and process.poll() is not", "no data despite no iter_timeout being set. ' 'cmd: %s'", "\"\"\" if not s: return '\"\"' elif all(c in _SafeShellChars", "process must finish. If it fails to do so, a", "if iter_timeout: iter_end = time.time() + iter_timeout cur_iter_timeout = iter_timeout", "if iter_end_time and time.time() > iter_end_time: yield None iter_end_time =", "not public. Args: process: The process in question. 
iter_timeout: An", "timeout=timeout): if logfile: logfile.write(data) output.write(data) except TimeoutError: raise TimeoutError(output.getvalue()) str_output", "cwd=None, shell=False, env=None): \"\"\"Starts a subprocess and returns a handle", "of time, in seconds, during which the process must finish.", "stdout, stderr) class TimeoutError(base_error.BaseError): \"\"\"Module-specific timeout exception.\"\"\" def __init__(self, output=None):", "stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore' )", "continuously yields lines from its output. Args: args: List of", "data despite no iter_timeout being set. ' 'cmd: %s' %", "for the subprocess. Returns: A process handle from subprocess.Popen. \"\"\"", "time to wait in calls to `select.select`. If iter_timeout is", "cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn ) else:", "not s: break yield s except queue.Empty: yield None finally:", "returns the command's stdout. Prints the command's stderr to logger", "it's executed. shell: Whether to execute args as a shell", "and stdout is None and stderr is None) preexec_fn =", "(x and SingleQuote(x) for x in value.split(var_value)) with_substitutions = ('\"$%s\"'", "timed out. cur_iter_timeout = iter_end - time.time() if data is", "that needs to happen. Args: cmd_parts: A list of command", "rights reserved. # Use of this source code is governed", "preexec_fn = None else: close_fds = True preexec_fn = lambda:", "Prints the command's stderr to logger (which defaults to stdout).", "using double quotes. \"\"\" if not s: return '\"\"' elif", "The 3-tuple (exit code, stdout, stderr). 
\"\"\" _ValidateAndLogCommand(args, cwd, shell)", "subprocess.STDOUT if merge_stderr else subprocess.PIPE pipe = Popen( args, stdout=subprocess.PIPE,", "be used in a shell command line as one token", "string or a sequence of program arguments. The program to", "(e.g. space, quote, or other special characters such as '$').", "buffer_size=4096, poll_interval=1): \"\"\"A Queue.Queue-based implementation of _IterProcessStdout. TODO(jbudorick): Evaluate whether", "var_value: The string to replace in cmd_parts with $var_name Returns:", "a program and returns its output. Args: args: A string", "length of time, in seconds, to wait in between each", "there # will be multiple encodings (e.g. adb logcat) return", "= True preexec_fn = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL) if six.PY2: return", "the string or the first item in the args sequence.", "= GetCmdStatusAndOutput(args, cwd, shell, env) return output def _ValidateAndLogCommand(args, cwd,", "shell commands easier.\"\"\" import codecs import logging import os import", "process in question. iter_timeout: An optional length of time, in", "not in '\\r\\n' lines = buffer_output.splitlines() buffer_output = lines.pop() if", "does not include setting the variable. \"\"\" def shrink(value): parts", "output data (because of timing # issues). while True: read_fds,", "stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env) pipe.communicate() return pipe.wait() def RunCmd(args,", "else: return '\"' + s.replace('\"', '\\\\\"') + '\"' def ShrinkToSnippet(cmd_parts,", "2-tuple (exit code, output). Raises: TimeoutError on timeout. \"\"\" _ValidateAndLogCommand(args,", "+ s.replace('\"', '\\\\\"') + '\"' def ShrinkToSnippet(cmd_parts, var_name, var_value): \"\"\"Constructs", "maximum number of bytes to read (and thus yield) at", "of _IterProcessStdout. TODO(jbudorick): Evaluate whether this is a suitable replacement", "wait in calls to `select.select`. 
If iter_timeout is set, the", "an shell-escaped version of the string using single quotes. Reliably", "the string using double quotes. Reliably quote a string which", "the subprocess. Returns: Captures and returns the command's stdout. Prints", "Returns: A process handle from subprocess.Popen. \"\"\" _ValidateAndLogCommand(args, cwd, shell)", "cur_iter_timeout = iter_end - time.time() if data is None or", "pipes import select import signal import string import subprocess import", "args must be run with shell=False') args = ' '.join(SingleQuote(str(c))", "codecs.lookup('string-escape') def SingleQuote(s): \"\"\"Return an shell-escaped version of the string", "'str', # and there will be no type mismatch error.", "RunCmd(args, cwd=None): \"\"\"Opens a subprocess to execute a program and", "license that can be # found in the LICENSE file.", "or None to wait forever. cwd: If not None, the", "make calling shell commands easier.\"\"\" import codecs import logging import", "in s): return s else: return '\"' + s.replace('\"', '\\\\\"')", "subprocess and returns its exit code, output, and errors. Args:", "line as one token that gets to be further interpreted", "keep checking for output data (because of timing # issues).", "program, the program to execute is the first element. iter_timeout:", "'\\', '!', '*', and '@'. Args: s: The string to", "environment variables for the subprocess. 
check_status: A boolean indicating whether", "has_incomplete_line = buffer_output[-1] not in '\\r\\n' lines = buffer_output.splitlines() buffer_output", "time.time() + iter_timeout if buffer_output: yield buffer_output if check_status and", "merge_stderr=merge_stderr) if stderr: logger.critical('STDERR: %s', stderr) logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(), '<truncated>'", "# Use of this source code is governed by a", "calling shell commands easier.\"\"\" import codecs import logging import os", "try: for data in _IterProcessStdout(process, timeout=timeout): if logfile: logfile.write(data) output.write(data)", "to execute is the first element. iter_timeout: Timeout for each", "return Call(args, cwd=cwd) def GetCmdOutput(args, cwd=None, shell=False, env=None): \"\"\"Open a", "Enable non-blocking reads from the child's stdout. child_fd = process.stdout.fileno()", "+= data has_incomplete_line = buffer_output[-1] not in '\\r\\n' lines =", "def GetCmdStatusAndOutput(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes a subprocess and", "for c in s): return s else: return '\"' +", "logfile: Optional file-like object that will receive output from the", "each iteration. If no output is received in the given", "for data in _IterProcessStdout( process, iter_timeout=cur_iter_timeout, timeout=timeout): if iter_timeout: #", "Returns: Return code from the command execution. \"\"\" logger.debug(str(args) +", "(c) 2012 The Chromium Authors. All rights reserved. # Use", "end_time = (time.time() + timeout) if timeout else None iter_end_time", "Exception('string args must be run with shell=True') else: if shell:", "cwd = '' else: cwd = ':' + cwd logger.debug('[host]%s>", "here. while True: try: output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size) except IOError:", "check_status=True): \"\"\"Executes a subprocess and continuously yields lines from its", "Return: The string quoted using single quotes. 
\"\"\" return pipes.quote(s)", "stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) try: for data in _IterProcessStdout(process, timeout=timeout): if", "str_output = output.getvalue() logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(), '<truncated>' if len(str_output) >", "True preexec_fn = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL) if six.PY2: return subprocess.Popen(", "data or None. \"\"\" def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False, logfile=None,", "a BSD-style license that can be # found in the", "wait in between each iteration. If no output is received", "(status, stdout) def StartCmd(args, cwd=None, shell=False, env=None): \"\"\"Starts a subprocess", "stdout, stderr). \"\"\" _ValidateAndLogCommand(args, cwd, shell) stderr = subprocess.STDOUT if", "\"\"\"Constructs a shell snippet for a command using a variable", "True: if end_time and time.time() > end_time: raise TimeoutError() if", "preexec_fn=preexec_fn ) else: # opens stdout in text mode, so", "seconds, during which the process must finish. If it fails", "s except queue.Empty: yield None finally: try: if process.returncode is", "finish. If it fails to do so, a TimeoutError will", "return args def GetCmdStatusAndOutput(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes a", "retaining some shell features such as variable interpolation. The returned", "line if iter_timeout: iter_end = time.time() + iter_timeout if buffer_output:", "process.kill() except OSError: pass process.wait() reader_thread.join() _IterProcessStdout = (_IterProcessStdoutQueue if", "stderr=subprocess.STDOUT) return _IterCmdOutputLines( process, cmd, iter_timeout=iter_timeout, timeout=timeout, check_status=check_status) def _IterCmdOutputLines(process,", "an # exception. 
process.kill() except OSError: pass process.wait() reader_thread.join() _IterProcessStdout", "shell): if isinstance(args, six.string_types): if not shell: raise Exception('string args", "as one token that gets to be further interpreted by", "(exit code, stdout, stderr). \"\"\" _ValidateAndLogCommand(args, cwd, shell) stderr =", "args: List of arguments to the program, the program to", "Returns: Captures and returns the command's stdout. Prints the command's", "TimeoutError will be raised. buffer_size: The maximum number of bytes", "fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK) end_time = (time.time() + timeout)", "that defines environment variables for the subprocess. Returns: Captures and", "code, stdout). \"\"\" status, stdout, stderr = GetCmdStatusOutputAndError( args, cwd=cwd,", "quoting that needs to happen. Args: cmd_parts: A list of", "shell snippet for a command using a variable to shrink", "the exit status of the process after all output has", "for the subprocess. check_status: A boolean indicating whether to check", "include setting the variable. \"\"\" def shrink(value): parts = (x", "else: cwd = ':' + cwd logger.debug('[host]%s> %s', cwd, args)", "or None. \"\"\" def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False, logfile=None, env=None):", "will be no type mismatch error. # Ignore any decoding", "iter_end_time = time.time() + iter_timeout if iter_end_time: iter_aware_poll_interval = min(poll_interval,", "all(c in _SafeShellChars for c in s): return s else:", "non-zero exit status. 
\"\"\" cmd = _ValidateAndLogCommand(args, cwd, shell) process", "'..')) SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six') if SIX_PATH not in", "cwd is None: cwd = '' else: cwd = ':'", "%s%s', str_output[:4096].rstrip(), '<truncated>' if len(str_output) > 4096 else '') return", "process = Popen( args, cwd=cwd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) try:", "wrapper for subprocess to make calling shell commands easier.\"\"\" import", "during which the process must finish. If it fails to", "we fail with an # exception. process.kill() except OSError: pass", "yield from raw data. buffer_output += data has_incomplete_line = buffer_output[-1]", "lines = buffer_output.splitlines() buffer_output = lines.pop() if has_incomplete_line else ''", "if not s: break yield s except queue.Empty: yield None", "to wait forever. cwd: If not None, the subprocess's current", "process is closed, keep checking for output data (because of", "# Make sure the process doesn't stick around if we", "env=env) def GetCmdStatusOutputAndError(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes a subprocess", "the subprocess. Returns: The 2-tuple (exit code, output). Raises: TimeoutError", "or other special characters such as '$'). The returned value", "env) return output def _ValidateAndLogCommand(args, cwd, shell): if isinstance(args, six.string_types):", "from raw data. buffer_output += data has_incomplete_line = buffer_output[-1] not", "not shell: raise Exception('string args must be run with shell=True')", "ensure subprocess can find it # later. 
Return value doesn't", "preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore' ) def Call(args, stdout=None, stderr=None, shell=None,", "subprocess.PIPE pipe = Popen( args, stdout=subprocess.PIPE, stderr=stderr, shell=shell, cwd=cwd, env=env)", "args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=cwd, env=env) def GetCmdStatusOutputAndError(args, cwd=None, shell=False,", "stderr: logger.critical('STDERR: %s', stderr) logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(), '<truncated>' if len(stdout)", "Whether to execute args as a shell command. Must be", "sure the process doesn't stick around if we fail with", "'@%_-+=:,./') # Cache the string-escape codec to ensure subprocess can", "except OSError: pass process.wait() def _IterProcessStdoutQueue(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1):", "be multiple encodings (e.g. adb logcat) return subprocess.Popen( args=args, cwd=cwd,", "try: if process.returncode is None: # Make sure the process", "source code is governed by a BSD-style license that can", "to quote. Return: The string quoted using single quotes. \"\"\"", "'\"' def ShrinkToSnippet(cmd_parts, var_name, var_value): \"\"\"Constructs a shell snippet for", "os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six') if SIX_PATH not in sys.path: sys.path.append(SIX_PATH) import", "program arguments. The program to execute is the string or", "if six.PY3: import queue else: import Queue as queue import", "be raised. buffer_size: The maximum number of bytes to read", "in the LICENSE file. \"\"\"A wrapper for subprocess to make", "in cmd_parts with $var_name Returns: A shell snippet that does", "poll_interval read_fds, _, _ = select.select([child_fd], [], [], iter_aware_poll_interval) if", "_read_and_decode(child_fd, buffer_size) if not data: break yield data if process.poll()", "False if args is a sequence. 
logfile: Optional file-like object", "in '\\r\\n' lines = buffer_output.splitlines() buffer_output = lines.pop() if has_incomplete_line", "(_, output) = GetCmdStatusAndOutput(args, cwd, shell, env) return output def", "Args: cmd_parts: A list of command arguments. var_name: The variable", "execute a program and returns its return value. Args: args:", "string to replace in cmd_parts with $var_name Returns: A shell", "stdout, stderr = pipe.communicate() return (pipe.returncode, stdout, stderr) class TimeoutError(base_error.BaseError):", "an shell-escaped version of the string using double quotes. Reliably", "subprocess's current directory will be changed to |cwd| before it's", "for the subprocess. Returns: The 2-tuple (exit code, output). Raises:", "def Popen(args, stdin=None, stdout=None, stderr=None, shell=None, cwd=None, env=None): # preexec_fn", "iter_aware_poll_interval) if child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size) if", "_IterProcessStdout = (_IterProcessStdoutQueue if sys.platform == 'win32' else _IterProcessStdoutFcntl) \"\"\"Iterate", "the args sequence. cwd: If not None, the subprocess's current", "(_IterProcessStdoutQueue if sys.platform == 'win32' else _IterProcessStdoutFcntl) \"\"\"Iterate over a", "has timed out. cur_iter_timeout = iter_end - time.time() if data", "a TimeoutError will be raised. buffer_size: The maximum number of", "are unavoidable, as we # do not know the encoding", "shell command line as one token that gets to be", "= fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK) end_time =", "of arguments to the program, the program to execute is", "variables for the subprocess. Returns: The 2-tuple (exit code, output).", "env=None): # preexec_fn isn't supported on windows. # pylint: disable=unexpected-keyword-arg", "_ValidateAndLogCommand(args, cwd, shell) output = six.StringIO() process = Popen( args,", "LICENSE file. 
\"\"\"A wrapper for subprocess to make calling shell", "doesn't matter. if six.PY2: codecs.lookup('string-escape') def SingleQuote(s): \"\"\"Return an shell-escaped", "sys.path: sys.path.append(SIX_PATH) import six from devil import base_error logger =", "if stderr: logger.critical('STDERR: %s', stderr) logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(), '<truncated>' if", "child's stdout. child_fd = process.stdout.fileno() fl = fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd,", "object that will receive output from the command as it", "iter_timeout) if not s: break yield s except queue.Empty: yield", "if args is a string and False if args is", "checking for output data (because of timing # issues). while", "# issues). while True: read_fds, _, _ = select.select([child_fd], [],", "Args: args: A string or a sequence of program arguments.", "quote. Return: The string quoted using single quotes. \"\"\" return", "of command arguments. var_name: The variable that holds var_value. var_value:", "None: break reader_thread = threading.Thread(target=read_process_stdout) reader_thread.start() end_time = (time.time() +", "\"\"\"Executes a subprocess and returns its exit code, output, and", "The string to quote. Return: The string quoted using single", "cur_iter_timeout < 0: yield None iter_end = time.time() + iter_timeout", "args) return args def GetCmdStatusAndOutput(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes", "as it is running. env: If not None, a mapping", "before it's executed. Returns: Return code from the command execution.", "literally. Args: s: The string to quote. Return: The string", "timeout else None iter_end_time = (time.time() + iter_timeout) if iter_timeout", "\"\"\" logger.debug(str(args) + ' ' + (cwd or '')) return", "timeout=None, cwd=None, shell=False, env=None, check_status=True): \"\"\"Executes a subprocess and continuously", "yield) at once. 
poll_interval: The length of time to wait", "if has_incomplete_line else '' for line in lines: yield line", "time.time() > end_time: raise TimeoutError() try: s = stdout_queue.get(True, iter_timeout)", "and SingleQuote(x) for x in value.split(var_value)) with_substitutions = ('\"$%s\"' %", "a handle to the process. Args: args: A string or", "Captures and returns the command's stdout. Prints the command's stderr", "GetCmdStatusOutputAndError(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes a subprocess and returns", "a subprocess and continuously yields lines from its output. Args:", "to stdout). \"\"\" (_, output) = GetCmdStatusAndOutput(args, cwd, shell, env)", "of stdout. Returns: The 3-tuple (exit code, stdout, stderr). \"\"\"", "if iter_end_time: iter_aware_poll_interval = min(poll_interval, max(0, iter_end_time - time.time())) else:", "return s else: return '\"' + s.replace('\"', '\\\\\"') + '\"'", "return ' '.join(shrink(part) for part in cmd_parts) def Popen(args, stdin=None,", "\"\"\" status, stdout, stderr = GetCmdStatusOutputAndError( args, cwd=cwd, shell=shell, env=env,", "for part in cmd_parts) def Popen(args, stdin=None, stdout=None, stderr=None, shell=None,", "and errors. Args: args: A string or a sequence of", "\"\"\"An fcntl-based implementation of _IterProcessStdout.\"\"\" # pylint: disable=too-many-nested-blocks import fcntl", "used in a shell command line as one token that", "devil import base_error logger = logging.getLogger(__name__) _SafeShellChars = frozenset(string.ascii_letters +", "line as one token that gets to be interpreted literally.", "in seconds. timeout: Timeout for the entire command, in seconds.", "args, cwd=cwd, shell=shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return _IterCmdOutputLines( process, cmd,", "\"\"\"Starts a subprocess and returns a handle to the process.", "(exit code, stdout). 
\"\"\" status, stdout, stderr = GetCmdStatusOutputAndError( args,", "iter_timeout cur_iter_timeout = iter_timeout for data in _IterProcessStdout( process, iter_timeout=cur_iter_timeout,", "as a shell command. Must be True if args is", "complete. Yields: basestrings of data or None. \"\"\" def GetCmdStatusAndOutputWithTimeout(args,", "Decoding errors are unavoidable, as we # do not know", "reader_thread.start() end_time = (time.time() + timeout) if timeout else None", "subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn,", "shell, env) return output def _ValidateAndLogCommand(args, cwd, shell): if isinstance(args,", "in text mode, so that caller side always get 'str',", "- time.time() if data is None or cur_iter_timeout < 0:", "buffer_size) if data and six.PY3: data = data.decode('utf-8', errors='ignore') return", "size here. while True: try: output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size) except", "None and stderr is None) preexec_fn = None else: close_fds", "status of the process after all output has been read.", "iter_timeout being set. ' 'cmd: %s' % cmd) # Construct", "|cwd| before it's executed. shell: Whether to execute args as", "if len(stdout) > 4096 else '') return (status, stdout) def", "+ '@%_-+=:,./') # Cache the string-escape codec to ensure subprocess", "stdout). \"\"\" status, stdout, stderr = GetCmdStatusOutputAndError( args, cwd=cwd, shell=shell,", "shell=False, env=None): \"\"\"Open a subprocess to execute a program and", "iter_timeout=None, timeout=None, check_status=True): buffer_output = '' iter_end = None cur_iter_timeout", "if we fail with an # exception. 
process.kill() except OSError:", "os import pipes import select import signal import string import", "= lines.pop() if has_incomplete_line else '' for line in lines:", "< 0: yield None iter_end = time.time() + iter_timeout continue", "_ValidateAndLogCommand(args, cwd, shell): if isinstance(args, six.string_types): if not shell: raise", "cmd_parts with $var_name Returns: A shell snippet that does not", "shell features such as variable interpolation. The returned value can", "the process doesn't stick around if we fail with an", "stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn ) else: # opens", "signal import string import subprocess import sys import time CATAPULT_ROOT_PATH", "logcat) return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds,", "appropriate read size here. while True: try: output_chunk = _read_and_decode(process.stdout.fileno(),", "in read_fds: data = _read_and_decode(child_fd, buffer_size) if data: yield data", "yields lines from its output. Args: args: List of arguments", "the string-escape codec to ensure subprocess can find it #", "first item in the args sequence. cwd: If not None,", "remaining length of time in the iteration may take precedence.", "cwd, shell) output = six.StringIO() process = Popen( args, cwd=cwd,", "platforms. \"\"\" # pylint: disable=unused-argument if six.PY3: import queue else:", "Optional file-like object that will receive output from the command", "\"''\" return ' '.join(shrink(part) for part in cmd_parts) def Popen(args,", "env=None, merge_stderr=False): \"\"\"Executes a subprocess and returns its exit code,", "args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn )", "received no data despite no iter_timeout being set. ' 'cmd:", "the process does not complete. 
Yields: basestrings of data or", "from devil import base_error logger = logging.getLogger(__name__) _SafeShellChars = frozenset(string.ascii_letters", "if cwd is None: cwd = '' else: cwd =", "can be used in a shell command line as one", "string or the first item in the args sequence. cwd:", "process does not complete. Yields: basestrings of data or None.", "thus yield) at once. poll_interval: The length of time to", "A shell snippet that does not include setting the variable.", "Cache the string-escape codec to ensure subprocess can find it", "import six from devil import base_error logger = logging.getLogger(__name__) _SafeShellChars", "isinstance(args, six.string_types): if not shell: raise Exception('string args must be", "is a sequence. logfile: Optional file-like object that will receive", "The Chromium Authors. All rights reserved. # Use of this", "using a variable to shrink it. Takes into account all", "data = _read_and_decode(child_fd, buffer_size) if data: yield data continue break", "as we # do not know the encoding of the", "time.time() + iter_timeout cur_iter_timeout = iter_timeout for data in _IterProcessStdout(", "mapping that defines environment variables for the subprocess. merge_stderr: If", "an # exception. process.kill() except OSError: pass process.wait() def _IterProcessStdoutQueue(process,", "if end_time and time.time() > end_time: raise TimeoutError() try: s", "= os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..', '..')) SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party',", "3-tuple (exit code, stdout, stderr). \"\"\" _ValidateAndLogCommand(args, cwd, shell) stderr", "None iter_end_time = (time.time() + iter_timeout) if iter_timeout else None", "None: cwd = '' else: cwd = ':' + cwd", "class TimeoutError(base_error.BaseError): \"\"\"Module-specific timeout exception.\"\"\" def __init__(self, output=None): super(TimeoutError, self).__init__('Timeout')", "given time, this generator will yield None. 
timeout: An optional", "OSError: pass process.wait() reader_thread.join() _IterProcessStdout = (_IterProcessStdoutQueue if sys.platform ==", "_IterCmdOutputLines( process, cmd, iter_timeout=iter_timeout, timeout=timeout, check_status=check_status) def _IterCmdOutputLines(process, cmd, iter_timeout=None,", "a shell command. Must be True if args is a", "data is None or cur_iter_timeout < 0: yield None iter_end", "timeout=None, check_status=True): buffer_output = '' iter_end = None cur_iter_timeout =", "timeout=None, buffer_size=4096, poll_interval=1): \"\"\"An fcntl-based implementation of _IterProcessStdout.\"\"\" # pylint:", "shell) return Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=cwd, env=env) def", "if len(str_output) > 4096 else '') return process.returncode, str_output def", "\"\"\"Open a subprocess to execute a program and returns its", "TimeoutError: if timeout is set and the process does not", "_, _ = select.select([child_fd], [], [], iter_aware_poll_interval) if child_fd in", "for the entire command, in seconds. cwd: If not None,", "'Iteration received no data despite no iter_timeout being set. '", "('\"$%s\"' % var_name).join(parts) return with_substitutions or \"''\" return ' '.join(shrink(part)", "_IterProcessStdout( process, iter_timeout=cur_iter_timeout, timeout=timeout): if iter_timeout: # Check whether the", "subprocess to execute a program and returns its return value.", "is the first element. timeout: the timeout in seconds or", "and continuously yields lines from its output. Args: args: List", "Copyright (c) 2012 The Chromium Authors. All rights reserved. #", "whether this is a suitable replacement for _IterProcessStdoutFcntl on all", "IOError: break stdout_queue.put(output_chunk, True) if not output_chunk and process.poll() is", "code and output. 
Args: args: A string or a sequence", "in read_fds: data = _read_and_decode(child_fd, buffer_size) if not data: break", "= GetCmdStatusOutputAndError( args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr) if stderr: logger.critical('STDERR:", "be interpreted literally. Args: s: The string to quote. Return:", "a string and False if args is a sequence. logfile:", "Return: The string quoted using double quotes. \"\"\" if not", "the remaining length of time in the iteration may take", "the process. Args: args: A string or a sequence of", "if isinstance(args, six.string_types): if not shell: raise Exception('string args must", "process after all output has been read. Yields: The output", "is not None: # If process is closed, keep checking", "_IterProcessStdout.\"\"\" # pylint: disable=too-many-nested-blocks import fcntl try: # Enable non-blocking", "pipe.communicate() return pipe.wait() def RunCmd(args, cwd=None): \"\"\"Opens a subprocess to", "continue break break finally: try: if process.returncode is None: #", "stdout, stderr = GetCmdStatusOutputAndError( args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr) if", "subprocess, line by line. Raises: CalledProcessError if check_status is True", "time, in seconds, during which the process must finish. If", "is not None: break reader_thread = threading.Thread(target=read_process_stdout) reader_thread.start() end_time =", "import queue else: import Queue as queue import threading stdout_queue", "\"\"\"Opens a subprocess to execute a program and returns its", "output. Args: args: A string or a sequence of program", "of the string using single quotes. Reliably quote a string", "can be # found in the LICENSE file. \"\"\"A wrapper", "str_output[:4096].rstrip(), '<truncated>' if len(str_output) > 4096 else '') return process.returncode,", "merge_stderr: If True, captures stderr as part of stdout. 
Returns:", "set usually includes: '$', '`', '\\', '!', '*', and '@'.", "= _read_and_decode(child_fd, buffer_size) if not data: break yield data if", "def __init__(self, output=None): super(TimeoutError, self).__init__('Timeout') self._output = output @property def", "time, this generator will yield None. timeout: An optional length", "shell: raise Exception('array args must be run with shell=False') args", "except queue.Empty: yield None finally: try: if process.returncode is None:", "shell=shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return _IterCmdOutputLines( process, cmd, iter_timeout=iter_timeout, timeout=timeout,", "its output. Args: args: A string or a sequence of", "close_fds = (stdin is None and stdout is None and", "True) if not output_chunk and process.poll() is not None: break", "# Check whether the current iteration has timed out. cur_iter_timeout", "if iter_timeout: iter_end = time.time() + iter_timeout if buffer_output: yield", "a string which may contain unsafe characters (e.g. space, quote,", "question. iter_timeout: An optional length of time, in seconds, to", "= Popen( args, cwd=cwd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) try: for", "is the string or the first item in the args", "|cwd| before it's executed. Returns: Return code from the command", "def output(self): return self._output def _read_and_decode(fd, buffer_size): data = os.read(fd,", "a non-zero exit status. \"\"\" cmd = _ValidateAndLogCommand(args, cwd, shell)", "that defines environment variables for the subprocess. Returns: A process", "process. Args: args: A string or a sequence of program", "program to execute is the string or the first item", "value can be used in a shell command line as", "None while True: if end_time and time.time() > end_time: raise", "between each iteration. 
If no output is received in the", "stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore' ) def", "with $var_name Returns: A shell snippet that does not include", "execute is the string or the first item in the", "a suitable replacement for _IterProcessStdoutFcntl on all platforms. \"\"\" #", "check_status is True and the process exited with a non-zero", "If no output is received in the given time, this", "directory will be changed to |cwd| before it's executed. Returns:", "shell=shell, cwd=cwd, env=env) stdout, stderr = pipe.communicate() return (pipe.returncode, stdout,", "output). Raises: TimeoutError on timeout. \"\"\" _ValidateAndLogCommand(args, cwd, shell) output", "that gets to be further interpreted by the shell. The", "Popen(args, stdin=None, stdout=None, stderr=None, shell=None, cwd=None, env=None): # preexec_fn isn't", "defines environment variables for the subprocess. check_status: A boolean indicating", "import threading stdout_queue = queue.Queue() def read_process_stdout(): # TODO(jbudorick): Pick", "shell=False') args = ' '.join(SingleQuote(str(c)) for c in args) if", "may contain unsafe characters (e.g. space, quote, or other special", "and time.time() > iter_end_time: yield None iter_end_time = time.time() +", "of time, in seconds, to wait in between each iteration.", "var_name: The variable that holds var_value. var_value: The string to", "buffer_output = lines.pop() if has_incomplete_line else '' for line in", "cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr) if stderr: logger.critical('STDERR: %s', stderr) logger.debug('STDOUT:", "subprocess to execute a program and returns its output. Args:", "shell=shell, cwd=cwd, env=env) def GetCmdStatusOutputAndError(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes", "program and returns its return value. 
Args: args: A string", "True: try: output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size) except IOError: break stdout_queue.put(output_chunk,", "None try: while True: if end_time and time.time() > end_time:", "cwd=None, shell=False, env=None): \"\"\"Open a subprocess to execute a program", "stdout) def StartCmd(args, cwd=None, shell=False, env=None): \"\"\"Starts a subprocess and", "type mismatch error. # Ignore any decoding error, so that", "% cmd) # Construct lines to yield from raw data.", "time.time() > end_time: raise TimeoutError() if iter_end_time and time.time() >", "pipe.communicate() return (pipe.returncode, stdout, stderr) class TimeoutError(base_error.BaseError): \"\"\"Module-specific timeout exception.\"\"\"", "returns its exit code and output. Args: args: A string", "subprocess and continuously yields lines from its output. Args: args:", "one token that gets to be interpreted literally. Args: s:", "(e.g. space or quote characters), while retaining some shell features", "TimeoutError() if iter_end_time and time.time() > iter_end_time: yield None iter_end_time", "in lines: yield line if iter_timeout: iter_end = time.time() +", "quote a string which may contain unsafe characters (e.g. space,", "on the shell implementation. This set usually includes: '$', '`',", "If process is closed, keep checking for output data (because", "its output. Args: args: List of arguments to the program,", "'*', and '@'. Args: s: The string to quote. Return:", "= None if iter_timeout: iter_end = time.time() + iter_timeout cur_iter_timeout", "\"\"\" return pipes.quote(s) def DoubleQuote(s): \"\"\"Return an shell-escaped version of", "else None iter_end_time = (time.time() + iter_timeout) if iter_timeout else", "cwd, args) return args def GetCmdStatusAndOutput(args, cwd=None, shell=False, env=None, merge_stderr=False):", "to execute args as a shell command. env: If not", "output has been read. 
Yields: The output of the subprocess,", "cwd, shell) stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE pipe", "iteration, in seconds. timeout: Timeout for the entire command, in", "and False if args is a sequence. logfile: Optional file-like", "TimeoutError on timeout. \"\"\" _ValidateAndLogCommand(args, cwd, shell) output = six.StringIO()", "while True: if end_time and time.time() > end_time: raise TimeoutError()", "to execute args as a shell command. Must be True", "args = ' '.join(SingleQuote(str(c)) for c in args) if cwd", "data: yield data continue break break finally: try: if process.returncode", "boolean indicating whether to check the exit status of the", "that defines environment variables for the subprocess. Returns: The 2-tuple", "governed by a BSD-style license that can be # found", "is None) preexec_fn = None else: close_fds = True preexec_fn", "iter_end = None cur_iter_timeout = None if iter_timeout: iter_end =", "seconds. timeout: Timeout for the entire command, in seconds. cwd:", "return output def _ValidateAndLogCommand(args, cwd, shell): if isinstance(args, six.string_types): if", "data in _IterProcessStdout( process, iter_timeout=cur_iter_timeout, timeout=timeout): if iter_timeout: # Check", "line. Raises: CalledProcessError if check_status is True and the process", "cmd = _ValidateAndLogCommand(args, cwd, shell) process = Popen( args, cwd=cwd,", "%s' % cmd) # Construct lines to yield from raw", "double quotes. \"\"\" if not s: return '\"\"' elif all(c", "as part of stdout. 
Returns: The 3-tuple (exit code, stdout,", "child_fd = process.stdout.fileno() fl = fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL, fl", "else None try: while True: if end_time and time.time() >", "stderr=subprocess.PIPE, shell=shell, cwd=cwd, env=env) def GetCmdStatusOutputAndError(args, cwd=None, shell=False, env=None, merge_stderr=False):", "and stderr is None) preexec_fn = None else: close_fds =", "sys.platform == 'win32': close_fds = (stdin is None and stdout", "# will be multiple encodings (e.g. adb logcat) return subprocess.Popen(", "env=env) pipe.communicate() return pipe.wait() def RunCmd(args, cwd=None): \"\"\"Opens a subprocess", "This set usually includes: '$', '`', '\\', '!', '*', and", "the process exited with a non-zero exit status. \"\"\" cmd", "= ':' + cwd logger.debug('[host]%s> %s', cwd, args) return args", "that retain their special meaning may depend on the shell", "len(str_output) > 4096 else '') return process.returncode, str_output def IterCmdOutputLines(args,", "else _IterProcessStdoutFcntl) \"\"\"Iterate over a process's stdout. This is intentionally", "shell-escaped version of the string using double quotes. Reliably quote", "None else: close_fds = True preexec_fn = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL)", "sys.platform == 'win32' else _IterProcessStdoutFcntl) \"\"\"Iterate over a process's stdout.", "is None: cwd = '' else: cwd = ':' +", "import signal import string import subprocess import sys import time", "+ string.digits + '@%_-+=:,./') # Cache the string-escape codec to", "basestrings of data or None. \"\"\" def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None,", "features such as variable interpolation. The returned value can be", "iteration may take precedence. 
Raises: TimeoutError: if timeout is set", "cwd=cwd, shell=shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return _IterCmdOutputLines( process, cmd, iter_timeout=iter_timeout,", "logger.debug(str(args) + ' ' + (cwd or '')) return Call(args,", "so that caller side always get 'str', # and there", "select.select([child_fd], [], [], iter_aware_poll_interval) if child_fd in read_fds: data =", "_IterCmdOutputLines(process, cmd, iter_timeout=None, timeout=None, check_status=True): buffer_output = '' iter_end =", "set of characters that retain their special meaning may depend", "args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn, universal_newlines=True,", "data.decode('utf-8', errors='ignore') return data def _IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1):", "stdout. This is intentionally not public. Args: process: The process", "`select.select`. If iter_timeout is set, the remaining length of time", "def _IterCmdOutputLines(process, cmd, iter_timeout=None, timeout=None, check_status=True): buffer_output = '' iter_end", "if process.returncode is None: # Make sure the process doesn't", "# If process is closed, keep checking for output data", "%s', stderr) logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(), '<truncated>' if len(stdout) > 4096", "env=env, merge_stderr=merge_stderr) if stderr: logger.critical('STDERR: %s', stderr) logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(),", "errors are unavoidable, as we # do not know the", "= _ValidateAndLogCommand(args, cwd, shell) process = Popen( args, cwd=cwd, shell=shell,", "time.time() > iter_end_time: yield None iter_end_time = time.time() + iter_timeout", "not None, ( 'Iteration received no data despite no iter_timeout", "whether to check the exit status of the process after", "no output is received in the given time, this generator", "to wait in calls to `select.select`. 
If iter_timeout is set,", "execute args as a shell command. env: If not None,", "= (stdin is None and stdout is None and stderr", "cur_iter_timeout = iter_timeout for data in _IterProcessStdout( process, iter_timeout=cur_iter_timeout, timeout=timeout):", "the subprocess. merge_stderr: If True, captures stderr as part of", "stderr is None) preexec_fn = None else: close_fds = True", "will be changed to |cwd| before it's executed. shell: Whether", "to replace in cmd_parts with $var_name Returns: A shell snippet", "part of stdout. Returns: The 3-tuple (exit code, stdout, stderr).", "output.getvalue() logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(), '<truncated>' if len(str_output) > 4096 else", "exit status. \"\"\" cmd = _ValidateAndLogCommand(args, cwd, shell) process =", "file. \"\"\"A wrapper for subprocess to make calling shell commands", "space or quote characters), while retaining some shell features such", "executed. Returns: Return code from the command execution. \"\"\" logger.debug(str(args)", "cmd, iter_timeout=None, timeout=None, check_status=True): buffer_output = '' iter_end = None", "\"\"\" # pylint: disable=unused-argument if six.PY3: import queue else: import", "'' iter_end = None cur_iter_timeout = None if iter_timeout: iter_end", "'\"' + s.replace('\"', '\\\\\"') + '\"' def ShrinkToSnippet(cmd_parts, var_name, var_value):", "time, in seconds, to wait in between each iteration. If", "execution. 
\"\"\" logger.debug(str(args) + ' ' + (cwd or ''))", "signal.signal(signal.SIGPIPE, signal.SIG_DFL) if six.PY2: return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout,", "value.split(var_value)) with_substitutions = ('\"$%s\"' % var_name).join(parts) return with_substitutions or \"''\"", "Popen( args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env) pipe.communicate() return pipe.wait()", "buffer_output.splitlines() buffer_output = lines.pop() if has_incomplete_line else '' for line", "subprocess to make calling shell commands easier.\"\"\" import codecs import", "shell=None, cwd=None, env=None): pipe = Popen( args, stdout=stdout, stderr=stderr, shell=shell,", "in some output there # will be multiple encodings (e.g.", "six.PY2: return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds,", "output(self): return self._output def _read_and_decode(fd, buffer_size): data = os.read(fd, buffer_size)", "a shell snippet for a command using a variable to", "with an # exception. process.kill() except OSError: pass process.wait() def", "in seconds, during which the process must finish. If it", "TODO(jbudorick): Evaluate whether this is a suitable replacement for _IterProcessStdoutFcntl", "holds var_value. var_value: The string to replace in cmd_parts with", "except IOError: break stdout_queue.put(output_chunk, True) if not output_chunk and process.poll()", "import logging import os import pipes import select import signal", "def GetCmdStatusOutputAndError(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes a subprocess and", "for a command using a variable to shrink it. Takes", "yield None iter_end = time.time() + iter_timeout continue else: assert", "time CATAPULT_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..', '..')) SIX_PATH =", "iteration has timed out. 
cur_iter_timeout = iter_end - time.time() if", "cwd=None): \"\"\"Opens a subprocess to execute a program and returns", "fcntl.F_SETFL, fl | os.O_NONBLOCK) end_time = (time.time() + timeout) if", "stdout is None and stderr is None) preexec_fn = None", "to shrink it. Takes into account all quoting that needs", "def _read_and_decode(fd, buffer_size): data = os.read(fd, buffer_size) if data and", "merge_stderr=False): \"\"\"Executes a subprocess and returns its exit code and", "if end_time and time.time() > end_time: raise TimeoutError() if iter_end_time", "gets to be further interpreted by the shell. The set", "var_value. var_value: The string to replace in cmd_parts with $var_name", "min(poll_interval, max(0, iter_end_time - time.time())) else: iter_aware_poll_interval = poll_interval read_fds,", "easier.\"\"\" import codecs import logging import os import pipes import", "in cmd_parts) def Popen(args, stdin=None, stdout=None, stderr=None, shell=None, cwd=None, env=None):", "stdout. Returns: The 2-tuple (exit code, stdout). \"\"\" status, stdout,", "args is a sequence. logfile: Optional file-like object that will", "unsafe characters (e.g. space or quote characters), while retaining some", "and process.poll() is not None: break reader_thread = threading.Thread(target=read_process_stdout) reader_thread.start()", "program to execute is the first element. timeout: the timeout", "stderr=None, shell=None, cwd=None, env=None): pipe = Popen( args, stdout=stdout, stderr=stderr,", "by line. Raises: CalledProcessError if check_status is True and the", "def ShrinkToSnippet(cmd_parts, var_name, var_value): \"\"\"Constructs a shell snippet for a", "its exit code, output, and errors. Args: args: A string", "whether the current iteration has timed out. cur_iter_timeout = iter_end", "the process must finish. 
If it fails to do so,", "(stdin is None and stdout is None and stderr is", "pylint: disable=unexpected-keyword-arg if sys.platform == 'win32': close_fds = (stdin is", "stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE pipe = Popen(", "in between each iteration. If no output is received in", "for the subprocess. Returns: Captures and returns the command's stdout.", "logger = logging.getLogger(__name__) _SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./')", "super(TimeoutError, self).__init__('Timeout') self._output = output @property def output(self): return self._output", "string quoted using single quotes. \"\"\" return pipes.quote(s) def DoubleQuote(s):", "command line as one token that gets to be interpreted", "pipe = Popen( args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env) pipe.communicate()", "The string to replace in cmd_parts with $var_name Returns: A", "threading stdout_queue = queue.Queue() def read_process_stdout(): # TODO(jbudorick): Pick an", "Popen( args, cwd=cwd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) try: for data", "from the command execution. \"\"\" logger.debug(str(args) + ' ' +", "# Cache the string-escape codec to ensure subprocess can find", "output) = GetCmdStatusAndOutput(args, cwd, shell, env) return output def _ValidateAndLogCommand(args,", "as part of stdout. Returns: The 2-tuple (exit code, stdout).", "= stdout_queue.get(True, iter_timeout) if not s: break yield s except", "part in cmd_parts) def Popen(args, stdin=None, stdout=None, stderr=None, shell=None, cwd=None,", "subprocess. merge_stderr: If True, captures stderr as part of stdout.", "'cmd: %s' % cmd) # Construct lines to yield from", "to `select.select`. If iter_timeout is set, the remaining length of", "cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes a subprocess and returns its", "first element. 
timeout: the timeout in seconds or None to", "not None, a mapping that defines environment variables for the", "a subprocess with a timeout. Args: args: List of arguments", "OSError: pass process.wait() def _IterProcessStdoutQueue(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"A", "returns its return value. Args: args: A string or a", "pipe = Popen( args, stdout=subprocess.PIPE, stderr=stderr, shell=shell, cwd=cwd, env=env) stdout,", "at once. poll_interval: The length of time to wait in", "= Popen( args, stdout=subprocess.PIPE, stderr=stderr, shell=shell, cwd=cwd, env=env) stdout, stderr", "'win32' else _IterProcessStdoutFcntl) \"\"\"Iterate over a process's stdout. This is", "process, iter_timeout=cur_iter_timeout, timeout=timeout): if iter_timeout: # Check whether the current", "contain unsafe characters (e.g. space or quote characters), while retaining", "logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(), '<truncated>' if len(str_output) > 4096 else '')", "# found in the LICENSE file. \"\"\"A wrapper for subprocess", "by the shell. The set of characters that retain their", "logger (which defaults to stdout). \"\"\" (_, output) = GetCmdStatusAndOutput(args,", "that defines environment variables for the subprocess. check_status: A boolean", "# uncaught exception. Decoding errors are unavoidable, as we #", "and the process exited with a non-zero exit status. \"\"\"", "return Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=cwd, env=env) def GetCmdStatusOutputAndError(args,", "def _ValidateAndLogCommand(args, cwd, shell): if isinstance(args, six.string_types): if not shell:", "stdout=None, stderr=None, shell=None, cwd=None, env=None): # preexec_fn isn't supported on", "Authors. All rights reserved. # Use of this source code", "break yield s except queue.Empty: yield None finally: try: if", "in seconds. 
cwd: If not None, the subprocess's current directory", "that does not include setting the variable. \"\"\" def shrink(value):", "stderr) logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(), '<truncated>' if len(stdout) > 4096 else", "which the process must finish. If it fails to do", "Return code from the command execution. \"\"\" logger.debug(str(args) + '", "= '' iter_end = None cur_iter_timeout = None if iter_timeout:", "buffer_output[-1] not in '\\r\\n' lines = buffer_output.splitlines() buffer_output = lines.pop()", "break reader_thread = threading.Thread(target=read_process_stdout) reader_thread.start() end_time = (time.time() + timeout)", "Reliably quote a string which may contain unsafe characters (e.g.", "':' + cwd logger.debug('[host]%s> %s', cwd, args) return args def", "despite no iter_timeout being set. ' 'cmd: %s' % cmd)", "fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK) end_time = (time.time() +", "the string using single quotes. Reliably quote a string which", "snippet for a command using a variable to shrink it.", "'' for line in lines: yield line if iter_timeout: iter_end", "is None and stdout is None and stderr is None)", "iter_timeout for data in _IterProcessStdout( process, iter_timeout=cur_iter_timeout, timeout=timeout): if iter_timeout:", "in _IterProcessStdout( process, iter_timeout=cur_iter_timeout, timeout=timeout): if iter_timeout: # Check whether", "disable=unused-argument if six.PY3: import queue else: import Queue as queue", "None. \"\"\" def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False, logfile=None, env=None): \"\"\"Executes", "and returns a handle to the process. Args: args: A", "= buffer_output.splitlines() buffer_output = lines.pop() if has_incomplete_line else '' for", "_IterProcessStdout. 
TODO(jbudorick): Evaluate whether this is a suitable replacement for", "not output_chunk and process.poll() is not None: break reader_thread =", "import time CATAPULT_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..', '..')) SIX_PATH", "for data in _IterProcessStdout(process, timeout=timeout): if logfile: logfile.write(data) output.write(data) except", "string to quote. Return: The string quoted using single quotes.", "all quoting that needs to happen. Args: cmd_parts: A list", "further interpreted by the shell. The set of characters that", "= threading.Thread(target=read_process_stdout) reader_thread.start() end_time = (time.time() + timeout) if timeout", "bytes to read (and thus yield) at once. poll_interval: The", "if six.PY2: return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell,", "to do so, a TimeoutError will be raised. buffer_size: The", "sys import time CATAPULT_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..', '..'))", "quoted using double quotes. \"\"\" if not s: return '\"\"'", "if buffer_output: yield buffer_output if check_status and process.returncode: raise subprocess.CalledProcessError(process.returncode,", "for line in lines: yield line if iter_timeout: iter_end =", "is not None, ( 'Iteration received no data despite no", "_read_and_decode(process.stdout.fileno(), buffer_size) except IOError: break stdout_queue.put(output_chunk, True) if not output_chunk", "%s', cwd, args) return args def GetCmdStatusAndOutput(args, cwd=None, shell=False, env=None,", "stderr=None, shell=None, cwd=None, env=None): # preexec_fn isn't supported on windows.", "subprocess. Returns: Captures and returns the command's stdout. Prints the", "environment variables for the subprocess. Returns: Captures and returns the", "execute is the first element. 
timeout: the timeout in seconds", "six.StringIO() process = Popen( args, cwd=cwd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env)", "reads from the child's stdout. child_fd = process.stdout.fileno() fl =", "yield data if process.poll() is not None: # If process", "the command execution. \"\"\" logger.debug(str(args) + ' ' + (cwd", "\"\"\"Executes a subprocess with a timeout. Args: args: List of", "break finally: try: if process.returncode is None: # Make sure", "data (because of timing # issues). while True: read_fds, _,", "of timing # issues). while True: read_fds, _, _ =", "'.join(SingleQuote(str(c)) for c in args) if cwd is None: cwd", "output from the command as it is running. env: If", "iter_end_time = (time.time() + iter_timeout) if iter_timeout else None while", "CATAPULT_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..', '..')) SIX_PATH = os.path.join(CATAPULT_ROOT_PATH,", "output, and in some output there # will be multiple", "def _IterProcessStdoutQueue(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"A Queue.Queue-based implementation of", "and output. Args: args: A string or a sequence of", "_IterProcessStdoutFcntl) \"\"\"Iterate over a process's stdout. This is intentionally not", "shell: raise Exception('string args must be run with shell=True') else:", "elif all(c in _SafeShellChars for c in s): return s", "process's stdout. This is intentionally not public. Args: process: The", "seconds, to wait in between each iteration. If no output", "str_output def IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None, shell=False, env=None, check_status=True): \"\"\"Executes", "None. timeout: An optional length of time, in seconds, during", "suitable replacement for _IterProcessStdoutFcntl on all platforms. \"\"\" # pylint:", "line by line. Raises: CalledProcessError if check_status is True and", "'!', '*', and '@'. 
Args: s: The string to quote.", "if timeout is set and the process does not complete.", "iter_aware_poll_interval = min(poll_interval, max(0, iter_end_time - time.time())) else: iter_aware_poll_interval =", "shell=None, cwd=None, env=None): # preexec_fn isn't supported on windows. #", "must finish. If it fails to do so, a TimeoutError", "and time.time() > end_time: raise TimeoutError() if iter_end_time and time.time()", "quote, or other special characters such as '$'). The returned", "shell: Whether to execute args as a shell command. env:", "' '.join(shrink(part) for part in cmd_parts) def Popen(args, stdin=None, stdout=None,", "iter_timeout=iter_timeout, timeout=timeout, check_status=check_status) def _IterCmdOutputLines(process, cmd, iter_timeout=None, timeout=None, check_status=True): buffer_output", "characters that retain their special meaning may depend on the", "stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore'", "meaning may depend on the shell implementation. This set usually", "not include setting the variable. \"\"\" def shrink(value): parts =", "shell implementation. This set usually includes: '$', '`', '\\', '!',", "If it fails to do so, a TimeoutError will be", "\"\"\"Iterate over a process's stdout. This is intentionally not public.", "reader_thread = threading.Thread(target=read_process_stdout) reader_thread.start() end_time = (time.time() + timeout) if", "iter_timeout=cur_iter_timeout, timeout=timeout): if iter_timeout: # Check whether the current iteration", "string and False if args is a sequence. env: If", "it fails to do so, a TimeoutError will be raised.", "os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..', '..')) SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six')", "The set of characters that retain their special meaning may", "# later. Return value doesn't matter. 
if six.PY2: codecs.lookup('string-escape') def", "stderr=stderr, shell=shell, cwd=cwd, env=env) stdout, stderr = pipe.communicate() return (pipe.returncode,", "= buffer_output[-1] not in '\\r\\n' lines = buffer_output.splitlines() buffer_output =", "arguments to the program, the program to execute is the", "Raises: CalledProcessError if check_status is True and the process exited", "data. buffer_output += data has_incomplete_line = buffer_output[-1] not in '\\r\\n'", "exception.\"\"\" def __init__(self, output=None): super(TimeoutError, self).__init__('Timeout') self._output = output @property", "snippet that does not include setting the variable. \"\"\" def", "Whether to execute args as a shell command. env: If", "is a sequence. env: If not None, a mapping that", "= (time.time() + timeout) if timeout else None iter_end_time =", "raise TimeoutError(output.getvalue()) str_output = output.getvalue() logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(), '<truncated>' if", "matter. if six.PY2: codecs.lookup('string-escape') def SingleQuote(s): \"\"\"Return an shell-escaped version", "the command's stdout. Prints the command's stderr to logger (which", "six.PY3: import queue else: import Queue as queue import threading", "select import signal import string import subprocess import sys import", "stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=cwd, env=env) def GetCmdStatusOutputAndError(args, cwd=None, shell=False, env=None,", "if not data: break yield data if process.poll() is not", "process.poll() is not None: break reader_thread = threading.Thread(target=read_process_stdout) reader_thread.start() end_time", "returned value can be used in a shell command line", "one token that gets to be further interpreted by the", "except OSError: pass process.wait() reader_thread.join() _IterProcessStdout = (_IterProcessStdoutQueue if sys.platform", "windows. 
# pylint: disable=unexpected-keyword-arg if sys.platform == 'win32': close_fds =", "needs to happen. Args: cmd_parts: A list of command arguments.", "and returns the command's stdout. Prints the command's stderr to", "s): return s else: return '\"' + s.replace('\"', '\\\\\"') +", "while retaining some shell features such as variable interpolation. The", "preexec_fn isn't supported on windows. # pylint: disable=unexpected-keyword-arg if sys.platform", "a subprocess and returns its exit code and output. Args:", "not in sys.path: sys.path.append(SIX_PATH) import six from devil import base_error", "iter_timeout: iter_end = time.time() + iter_timeout if buffer_output: yield buffer_output", "fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK) end_time = (time.time()", "while True: try: output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size) except IOError: break", "by a BSD-style license that can be # found in", "\"\"\"A Queue.Queue-based implementation of _IterProcessStdout. TODO(jbudorick): Evaluate whether this is", "= six.StringIO() process = Popen( args, cwd=cwd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,", "| os.O_NONBLOCK) end_time = (time.time() + timeout) if timeout else", "self).__init__('Timeout') self._output = output @property def output(self): return self._output def", "queue.Queue() def read_process_stdout(): # TODO(jbudorick): Pick an appropriate read size", "be run with shell=False') args = ' '.join(SingleQuote(str(c)) for c", "_ = select.select([child_fd], [], [], iter_aware_poll_interval) if child_fd in read_fds:", "that gets to be interpreted literally. Args: s: The string", "mapping that defines environment variables for the subprocess. Returns: A", "is set, the remaining length of time in the iteration", "is the first element. 
iter_timeout: Timeout for each iteration, in", "data = os.read(fd, buffer_size) if data and six.PY3: data =", "output there # will be multiple encodings (e.g. adb logcat)", "output_chunk and process.poll() is not None: break reader_thread = threading.Thread(target=read_process_stdout)", "or '')) return Call(args, cwd=cwd) def GetCmdOutput(args, cwd=None, shell=False, env=None):", "# pylint: disable=too-many-nested-blocks import fcntl try: # Enable non-blocking reads", "captures stderr as part of stdout. Returns: The 3-tuple (exit", "the subprocess. check_status: A boolean indicating whether to check the", "in seconds, to wait in between each iteration. If no", "> 4096 else '') return (status, stdout) def StartCmd(args, cwd=None,", "timeout) if timeout else None iter_end_time = (time.time() + iter_timeout)", "output=None): super(TimeoutError, self).__init__('Timeout') self._output = output @property def output(self): return", "def DoubleQuote(s): \"\"\"Return an shell-escaped version of the string using", "a subprocess to execute a program and returns its return", "version of the string using double quotes. Reliably quote a", "of the subprocess, line by line. Raises: CalledProcessError if check_status", "All rights reserved. # Use of this source code is", "optional length of time, in seconds, during which the process", "to execute a program and returns its output. Args: args:", "with a timeout. Args: args: List of arguments to the", "and returns its exit code, output, and errors. Args: args:", "or \"''\" return ' '.join(shrink(part) for part in cmd_parts) def", "The program to execute is the string or the first", "be further interpreted by the shell. The set of characters", "(which defaults to stdout). \"\"\" (_, output) = GetCmdStatusAndOutput(args, cwd,", "a program and returns its return value. Args: args: A", "buffer_size): data = os.read(fd, buffer_size) if data and six.PY3: data", "# preexec_fn isn't supported on windows. 
# pylint: disable=unexpected-keyword-arg if", "for subprocess to make calling shell commands easier.\"\"\" import codecs", "cwd=cwd, env=env) stdout, stderr = pipe.communicate() return (pipe.returncode, stdout, stderr)", "the first element. iter_timeout: Timeout for each iteration, in seconds.", "read size here. while True: try: output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size)", "first element. iter_timeout: Timeout for each iteration, in seconds. timeout:", "= iter_timeout for data in _IterProcessStdout( process, iter_timeout=cur_iter_timeout, timeout=timeout): if", "'@'. Args: s: The string to quote. Return: The string", "args as a shell command. Must be True if args", "not s: return '\"\"' elif all(c in _SafeShellChars for c", "os.O_NONBLOCK) end_time = (time.time() + timeout) if timeout else None", "process doesn't stick around if we fail with an #", "of the string using double quotes. Reliably quote a string", "a sequence of program arguments. The program to execute is", "return with_substitutions or \"''\" return ' '.join(shrink(part) for part in", "lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL) if six.PY2: return subprocess.Popen( args=args, cwd=cwd, stdin=stdin,", "stick around if we fail with an # exception. process.kill()", "such as variable interpolation. The returned value can be used", "> end_time: raise TimeoutError() if iter_end_time and time.time() > iter_end_time:", "with_substitutions = ('\"$%s\"' % var_name).join(parts) return with_substitutions or \"''\" return", "of _IterProcessStdout.\"\"\" # pylint: disable=too-many-nested-blocks import fcntl try: # Enable", "Chromium Authors. All rights reserved. # Use of this source", "as one token that gets to be interpreted literally. Args:", "a mapping that defines environment variables for the subprocess. Returns:", "not data: break yield data if process.poll() is not None:", "False if args is a sequence. 
env: If not None,", "or cur_iter_timeout < 0: yield None iter_end = time.time() +", "= None cur_iter_timeout = None if iter_timeout: iter_end = time.time()", "shrink it. Takes into account all quoting that needs to", "break yield data if process.poll() is not None: # If", "shell=False, logfile=None, env=None): \"\"\"Executes a subprocess with a timeout. Args:", "and six.PY3: data = data.decode('utf-8', errors='ignore') return data def _IterProcessStdoutFcntl(process,", "the command's stderr to logger (which defaults to stdout). \"\"\"", "buffer_output: yield buffer_output if check_status and process.returncode: raise subprocess.CalledProcessError(process.returncode, cmd)", "received in the given time, this generator will yield None.", "stdout=subprocess.PIPE, stderr=stderr, shell=shell, cwd=cwd, env=env) stdout, stderr = pipe.communicate() return", "contain unsafe characters (e.g. space, quote, or other special characters", "s: The string to quote. Return: The string quoted using", "SIX_PATH not in sys.path: sys.path.append(SIX_PATH) import six from devil import", "token that gets to be interpreted literally. Args: s: The", "process.poll() is not None: # If process is closed, keep", "version of the string using single quotes. Reliably quote a", "sequence. cwd: If not None, the subprocess's current directory will", "env=None): \"\"\"Open a subprocess to execute a program and returns", "TimeoutError() try: s = stdout_queue.get(True, iter_timeout) if not s: break", "timeout in seconds or None to wait forever. cwd: If", "The 2-tuple (exit code, stdout). \"\"\" status, stdout, stderr =", "shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore' ) def Call(args,", "sequence of program arguments. 
The program to execute is the", "yield line if iter_timeout: iter_end = time.time() + iter_timeout if", "end_time and time.time() > end_time: raise TimeoutError() if iter_end_time and", "None and stdout is None and stderr is None) preexec_fn", "string.digits + '@%_-+=:,./') # Cache the string-escape codec to ensure", "' '.join(SingleQuote(str(c)) for c in args) if cwd is None:", "data has_incomplete_line = buffer_output[-1] not in '\\r\\n' lines = buffer_output.splitlines()", "will be changed to |cwd| before it's executed. Returns: Return", "string import subprocess import sys import time CATAPULT_ROOT_PATH = os.path.abspath(", "iter_timeout if buffer_output: yield buffer_output if check_status and process.returncode: raise", "The string quoted using double quotes. \"\"\" if not s:", "in sys.path: sys.path.append(SIX_PATH) import six from devil import base_error logger", "' ' + (cwd or '')) return Call(args, cwd=cwd) def", "env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return _IterCmdOutputLines( process, cmd, iter_timeout=iter_timeout, timeout=timeout, check_status=check_status)", "and time.time() > end_time: raise TimeoutError() try: s = stdout_queue.get(True,", "= process.stdout.fileno() fl = fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL, fl |", "len(stdout) > 4096 else '') return (status, stdout) def StartCmd(args,", "data in _IterProcessStdout(process, timeout=timeout): if logfile: logfile.write(data) output.write(data) except TimeoutError:", "data if process.poll() is not None: # If process is", "timeout: An optional length of time, in seconds, during which", "the entire command, in seconds. cwd: If not None, the", "The 2-tuple (exit code, output). Raises: TimeoutError on timeout. \"\"\"", "timeout) if timeout else None try: while True: if end_time", "'$'). The returned value can be used in a shell", "to be further interpreted by the shell. 
The set of", "stdout[:4096].rstrip(), '<truncated>' if len(stdout) > 4096 else '') return (status,", "The variable that holds var_value. var_value: The string to replace", "error, so that caller will not crash due to #", "fcntl-based implementation of _IterProcessStdout.\"\"\" # pylint: disable=too-many-nested-blocks import fcntl try:", "it. Takes into account all quoting that needs to happen.", "args) if cwd is None: cwd = '' else: cwd", "subprocess.Popen. \"\"\" _ValidateAndLogCommand(args, cwd, shell) return Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,", "string using double quotes. Reliably quote a string which may", "If not None, the subprocess's current directory will be changed", "be # found in the LICENSE file. \"\"\"A wrapper for", "to the program, the program to execute is the first", "poll_interval=1): \"\"\"A Queue.Queue-based implementation of _IterProcessStdout. TODO(jbudorick): Evaluate whether this", "current iteration has timed out. cur_iter_timeout = iter_end - time.time()", "code is governed by a BSD-style license that can be", "to execute is the string or the first item in", "args, cwd=cwd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) try: for data in", "quote characters), while retaining some shell features such as variable", "command arguments. var_name: The variable that holds var_value. var_value: The", "in value.split(var_value)) with_substitutions = ('\"$%s\"' % var_name).join(parts) return with_substitutions or", "(e.g. 
adb logcat) return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr,", "# opens stdout in text mode, so that caller side", "= output @property def output(self): return self._output def _read_and_decode(fd, buffer_size):", "cmd, iter_timeout=iter_timeout, timeout=timeout, check_status=check_status) def _IterCmdOutputLines(process, cmd, iter_timeout=None, timeout=None, check_status=True):", "not know the encoding of the output, and in some", "else '') return process.returncode, str_output def IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None,", "string-escape codec to ensure subprocess can find it # later.", "timeout exception.\"\"\" def __init__(self, output=None): super(TimeoutError, self).__init__('Timeout') self._output = output", "max(0, iter_end_time - time.time())) else: iter_aware_poll_interval = poll_interval read_fds, _,", "happen. Args: cmd_parts: A list of command arguments. var_name: The", "replace in cmd_parts with $var_name Returns: A shell snippet that", "the process after all output has been read. Yields: The", "quotes. Reliably quote a string which may contain unsafe characters", "logging.getLogger(__name__) _SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./') # Cache", "None if iter_timeout: iter_end = time.time() + iter_timeout cur_iter_timeout =", "captures stderr as part of stdout. Returns: The 2-tuple (exit", "Popen( args, cwd=cwd, shell=shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return _IterCmdOutputLines( process,", "implementation. This set usually includes: '$', '`', '\\', '!', '*',", "Check whether the current iteration has timed out. cur_iter_timeout =", "forever. 
cwd: If not None, the subprocess's current directory will", "' 'cmd: %s' % cmd) # Construct lines to yield", "cwd, shell): if isinstance(args, six.string_types): if not shell: raise Exception('string", "to make calling shell commands easier.\"\"\" import codecs import logging", "if timeout else None iter_end_time = (time.time() + iter_timeout) if", "args is a string and False if args is a", "return (status, stdout) def StartCmd(args, cwd=None, shell=False, env=None): \"\"\"Starts a", "quote. Return: The string quoted using double quotes. \"\"\" if", "or the first item in the args sequence. cwd: If", "env=env) stdout, stderr = pipe.communicate() return (pipe.returncode, stdout, stderr) class", "a subprocess and returns a handle to the process. Args:", "iter_end = time.time() + iter_timeout cur_iter_timeout = iter_timeout for data", "time in the iteration may take precedence. Raises: TimeoutError: if", "exit status of the process after all output has been", "command. env: If not None, a mapping that defines environment", "wait forever. cwd: If not None, the subprocess's current directory", "we # do not know the encoding of the output,", "variable to shrink it. Takes into account all quoting that", "iter_end_time - time.time())) else: iter_aware_poll_interval = poll_interval read_fds, _, _", "cwd=cwd) def GetCmdOutput(args, cwd=None, shell=False, env=None): \"\"\"Open a subprocess to", "pylint: disable=unused-argument if six.PY3: import queue else: import Queue as", "may take precedence. Raises: TimeoutError: if timeout is set and", "as variable interpolation. The returned value can be used in", "$var_name Returns: A shell snippet that does not include setting", "s.replace('\"', '\\\\\"') + '\"' def ShrinkToSnippet(cmd_parts, var_name, var_value): \"\"\"Constructs a", "seconds or None to wait forever. cwd: If not None,", "shell command. Must be True if args is a string", "has been read. 
Yields: The output of the subprocess, line", "'six') if SIX_PATH not in sys.path: sys.path.append(SIX_PATH) import six from", "_ValidateAndLogCommand(args, cwd, shell) stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE", "# do not know the encoding of the output, and", "'.join(shrink(part) for part in cmd_parts) def Popen(args, stdin=None, stdout=None, stderr=None,", "None: # If process is closed, keep checking for output", "on windows. # pylint: disable=unexpected-keyword-arg if sys.platform == 'win32': close_fds", "cur_iter_timeout = None if iter_timeout: iter_end = time.time() + iter_timeout", "a mapping that defines environment variables for the subprocess. check_status:", "break break finally: try: if process.returncode is None: # Make", "if not output_chunk and process.poll() is not None: break reader_thread", "errors. Args: args: A string or a sequence of program", "cwd=None, env=None): pipe = Popen( args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd,", "Construct lines to yield from raw data. buffer_output += data", "arguments. var_name: The variable that holds var_value. var_value: The string", "var_name).join(parts) return with_substitutions or \"''\" return ' '.join(shrink(part) for part", "in _SafeShellChars for c in s): return s else: return", "doesn't stick around if we fail with an # exception.", "its exit code and output. Args: args: A string or", "variables for the subprocess. Returns: Captures and returns the command's", "(because of timing # issues). while True: read_fds, _, _", "+ timeout) if timeout else None iter_end_time = (time.time() +", "= output.getvalue() logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(), '<truncated>' if len(str_output) > 4096", "to yield from raw data. buffer_output += data has_incomplete_line =", "BSD-style license that can be # found in the LICENSE", "timing # issues). 
while True: read_fds, _, _ = select.select([child_fd],", "parts = (x and SingleQuote(x) for x in value.split(var_value)) with_substitutions", "must be run with shell=False') args = ' '.join(SingleQuote(str(c)) for", "+ iter_timeout if iter_end_time: iter_aware_poll_interval = min(poll_interval, max(0, iter_end_time -", "logger.critical('STDERR: %s', stderr) logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(), '<truncated>' if len(stdout) >", "stdout. child_fd = process.stdout.fileno() fl = fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL,", "shell=False, env=None, check_status=True): \"\"\"Executes a subprocess and continuously yields lines", "be True if args is a string and False if", "execute a program and returns its output. Args: args: A", "# pylint: disable=unused-argument if six.PY3: import queue else: import Queue", "handle to the process. Args: args: A string or a", "returns a handle to the process. Args: args: A string", "of stdout. Returns: The 2-tuple (exit code, stdout). \"\"\" status,", "yield None. timeout: An optional length of time, in seconds,", "variables for the subprocess. merge_stderr: If True, captures stderr as", "shell) output = six.StringIO() process = Popen( args, cwd=cwd, shell=shell,", "return process.returncode, str_output def IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None, shell=False, env=None,", "\"\"\"Executes a subprocess and continuously yields lines from its output.", "encoding='utf-8', errors='ignore' ) def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None):", "s = stdout_queue.get(True, iter_timeout) if not s: break yield s", "program and returns its output. Args: args: A string or", "args def GetCmdStatusAndOutput(args, cwd=None, shell=False, env=None, merge_stderr=False): \"\"\"Executes a subprocess", "A process handle from subprocess.Popen. \"\"\" _ValidateAndLogCommand(args, cwd, shell) return", "a mapping that defines environment variables for the subprocess. 
merge_stderr:", "@property def output(self): return self._output def _read_and_decode(fd, buffer_size): data =", "try: while True: if end_time and time.time() > end_time: raise", "mode, so that caller side always get 'str', # and", "subprocess import sys import time CATAPULT_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '..',", "know the encoding of the output, and in some output", "yield s except queue.Empty: yield None finally: try: if process.returncode", "[], iter_aware_poll_interval) if child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size)", "a string and False if args is a sequence. env:", "= _read_and_decode(child_fd, buffer_size) if data: yield data continue break break", "sequence. logfile: Optional file-like object that will receive output from", "variables for the subprocess. check_status: A boolean indicating whether to", "child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size) if not data:", "with an # exception. process.kill() except OSError: pass process.wait() reader_thread.join()", "None or cur_iter_timeout < 0: yield None iter_end = time.time()", "will not crash due to # uncaught exception. Decoding errors", "setting the variable. \"\"\" def shrink(value): parts = (x and", "args must be run with shell=True') else: if shell: raise", "stdout_queue.get(True, iter_timeout) if not s: break yield s except queue.Empty:", "time.time())) else: iter_aware_poll_interval = poll_interval read_fds, _, _ = select.select([child_fd],", "def IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None, shell=False, env=None, check_status=True): \"\"\"Executes a", "is None or cur_iter_timeout < 0: yield None iter_end =", "closed, keep checking for output data (because of timing #", "which may contain unsafe characters (e.g. 
space or quote characters),", "process = Popen( args, cwd=cwd, shell=shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return", "if check_status is True and the process exited with a", "interpreted by the shell. The set of characters that retain", "subprocess. Returns: A process handle from subprocess.Popen. \"\"\" _ValidateAndLogCommand(args, cwd,", "public. Args: process: The process in question. iter_timeout: An optional", "the command as it is running. env: If not None,", "double quotes. Reliably quote a string which may contain unsafe", "Queue as queue import threading stdout_queue = queue.Queue() def read_process_stdout():", "SingleQuote(s): \"\"\"Return an shell-escaped version of the string using single", "= frozenset(string.ascii_letters + string.digits + '@%_-+=:,./') # Cache the string-escape", "IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None, shell=False, env=None, check_status=True): \"\"\"Executes a subprocess", "= os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six') if SIX_PATH not in sys.path: sys.path.append(SIX_PATH)", "# Construct lines to yield from raw data. buffer_output +=", "\"\"\"Module-specific timeout exception.\"\"\" def __init__(self, output=None): super(TimeoutError, self).__init__('Timeout') self._output =", "Queue.Queue-based implementation of _IterProcessStdout. TODO(jbudorick): Evaluate whether this is a", "Returns: The 2-tuple (exit code, stdout). 
\"\"\" status, stdout, stderr", "shell) process = Popen( args, cwd=cwd, shell=shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)", "TimeoutError(base_error.BaseError): \"\"\"Module-specific timeout exception.\"\"\" def __init__(self, output=None): super(TimeoutError, self).__init__('Timeout') self._output", "or quote characters), while retaining some shell features such as", "import subprocess import sys import time CATAPULT_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__),", "run with shell=True') else: if shell: raise Exception('array args must", "in question. iter_timeout: An optional length of time, in seconds,", "shell) stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE pipe =", "A string or a sequence of program arguments. The program", "the first element. timeout: the timeout in seconds or None", "in _IterProcessStdout(process, timeout=timeout): if logfile: logfile.write(data) output.write(data) except TimeoutError: raise", "= subprocess.STDOUT if merge_stderr else subprocess.PIPE pipe = Popen( args,", "opens stdout in text mode, so that caller side always", "= Popen( args, cwd=cwd, shell=shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return _IterCmdOutputLines(", "isn't supported on windows. # pylint: disable=unexpected-keyword-arg if sys.platform ==", ") def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None): pipe =", "stdout. Prints the command's stderr to logger (which defaults to", "if iter_timeout else None while True: if end_time and time.time()", "variable interpolation. The returned value can be used in a", "args as a shell command. env: If not None, a", "from the command as it is running. env: If not", "timeout else None try: while True: if end_time and time.time()", "timeout=None, buffer_size=4096, poll_interval=1): \"\"\"A Queue.Queue-based implementation of _IterProcessStdout. 
TODO(jbudorick): Evaluate", "'..', '..')) SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six') if SIX_PATH not", "file-like object that will receive output from the command as", "stdout. Returns: The 3-tuple (exit code, stdout, stderr). \"\"\" _ValidateAndLogCommand(args,", "buffer_size) if data: yield data continue break break finally: try:", "handle from subprocess.Popen. \"\"\" _ValidateAndLogCommand(args, cwd, shell) return Popen( args,", "'\\r\\n' lines = buffer_output.splitlines() buffer_output = lines.pop() if has_incomplete_line else", "(pipe.returncode, stdout, stderr) class TimeoutError(base_error.BaseError): \"\"\"Module-specific timeout exception.\"\"\" def __init__(self,", "- time.time())) else: iter_aware_poll_interval = poll_interval read_fds, _, _ =", "TimeoutError(output.getvalue()) str_output = output.getvalue() logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(), '<truncated>' if len(str_output)", "pipe.wait() def RunCmd(args, cwd=None): \"\"\"Opens a subprocess to execute a", "args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr) if stderr: logger.critical('STDERR: %s', stderr)", "Must be True if args is a string and False", "'<truncated>' if len(str_output) > 4096 else '') return process.returncode, str_output", "once. poll_interval: The length of time to wait in calls", "the child's stdout. child_fd = process.stdout.fileno() fl = fcntl.fcntl(child_fd, fcntl.F_GETFL)", "the shell. The set of characters that retain their special", "in args) if cwd is None: cwd = '' else:", "stdout). \"\"\" (_, output) = GetCmdStatusAndOutput(args, cwd, shell, env) return", "is a string and False if args is a sequence.", "= queue.Queue() def read_process_stdout(): # TODO(jbudorick): Pick an appropriate read", "code, stdout, stderr). \"\"\" _ValidateAndLogCommand(args, cwd, shell) stderr = subprocess.STDOUT", "a variable to shrink it. 
Takes into account all quoting", "_read_and_decode(fd, buffer_size): data = os.read(fd, buffer_size) if data and six.PY3:", "quoted using single quotes. \"\"\" return pipes.quote(s) def DoubleQuote(s): \"\"\"Return", "buffer_size: The maximum number of bytes to read (and thus", "+ iter_timeout continue else: assert data is not None, (", "cmd_parts) def Popen(args, stdin=None, stdout=None, stderr=None, shell=None, cwd=None, env=None): #", "TimeoutError: raise TimeoutError(output.getvalue()) str_output = output.getvalue() logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(), '<truncated>'", "cmd_parts: A list of command arguments. var_name: The variable that", "the subprocess. Returns: A process handle from subprocess.Popen. \"\"\" _ValidateAndLogCommand(args,", "space, quote, or other special characters such as '$'). The", "in a shell command line as one token that gets", "None iter_end_time = time.time() + iter_timeout if iter_end_time: iter_aware_poll_interval =", "pipes.quote(s) def DoubleQuote(s): \"\"\"Return an shell-escaped version of the string", "\"\"\" _ValidateAndLogCommand(args, cwd, shell) return Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell,", "= None else: close_fds = True preexec_fn = lambda: signal.signal(signal.SIGPIPE,", "Ignore any decoding error, so that caller will not crash", "encoding of the output, and in some output there #", "find it # later. Return value doesn't matter. if six.PY2:", "buffer_size=4096, poll_interval=1): \"\"\"An fcntl-based implementation of _IterProcessStdout.\"\"\" # pylint: disable=too-many-nested-blocks", "os.read(fd, buffer_size) if data and six.PY3: data = data.decode('utf-8', errors='ignore')", "to execute a program and returns its return value. 
Args:", "\"\"\"A wrapper for subprocess to make calling shell commands easier.\"\"\"", "import pipes import select import signal import string import subprocess", "timeout, cwd=None, shell=False, logfile=None, env=None): \"\"\"Executes a subprocess with a", "timeout: the timeout in seconds or None to wait forever.", "env=env, preexec_fn=preexec_fn ) else: # opens stdout in text mode,", "process.returncode, str_output def IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None, shell=False, env=None, check_status=True):", "stdout=None, stderr=None, shell=None, cwd=None, env=None): pipe = Popen( args, stdout=stdout,", "non-blocking reads from the child's stdout. child_fd = process.stdout.fileno() fl", "= time.time() + iter_timeout if iter_end_time: iter_aware_poll_interval = min(poll_interval, max(0,", "account all quoting that needs to happen. Args: cmd_parts: A", "gets to be interpreted literally. Args: s: The string to", "_SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./') # Cache the", "stderr = GetCmdStatusOutputAndError( args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr) if stderr:", "shell=shell, cwd=cwd, env=env) pipe.communicate() return pipe.wait() def RunCmd(args, cwd=None): \"\"\"Opens", "ShrinkToSnippet(cmd_parts, var_name, var_value): \"\"\"Constructs a shell snippet for a command", "process.wait() reader_thread.join() _IterProcessStdout = (_IterProcessStdoutQueue if sys.platform == 'win32' else", "cwd, shell) process = Popen( args, cwd=cwd, shell=shell, env=env, stdout=subprocess.PIPE,", "if logfile: logfile.write(data) output.write(data) except TimeoutError: raise TimeoutError(output.getvalue()) str_output =", "If not None, a mapping that defines environment variables for", "TODO(jbudorick): Pick an appropriate read size here. while True: try:", "codec to ensure subprocess can find it # later. 
Return", "A boolean indicating whether to check the exit status of", "six.PY2: codecs.lookup('string-escape') def SingleQuote(s): \"\"\"Return an shell-escaped version of the", "usually includes: '$', '`', '\\', '!', '*', and '@'. Args:", "six.string_types): if not shell: raise Exception('string args must be run", "= ('\"$%s\"' % var_name).join(parts) return with_substitutions or \"''\" return '", "using single quotes. Reliably quote a string which may contain", "raised. buffer_size: The maximum number of bytes to read (and", "always get 'str', # and there will be no type", "Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=cwd, env=env) def GetCmdStatusOutputAndError(args, cwd=None,", "> iter_end_time: yield None iter_end_time = time.time() + iter_timeout if", "and returns its exit code and output. Args: args: A", "multiple encodings (e.g. adb logcat) return subprocess.Popen( args=args, cwd=cwd, stdin=stdin,", "changed to |cwd| before it's executed. shell: Whether to execute", "string which may contain unsafe characters (e.g. space, quote, or", "not None: # If process is closed, keep checking for", "raise Exception('array args must be run with shell=False') args =", "iter_timeout is set, the remaining length of time in the", "a timeout. Args: args: List of arguments to the program,", "Takes into account all quoting that needs to happen. Args:", "= (_IterProcessStdoutQueue if sys.platform == 'win32' else _IterProcessStdoutFcntl) \"\"\"Iterate over", "characters (e.g. space, quote, or other special characters such as", "in the iteration may take precedence. Raises: TimeoutError: if timeout", "if six.PY2: codecs.lookup('string-escape') def SingleQuote(s): \"\"\"Return an shell-escaped version of", "characters), while retaining some shell features such as variable interpolation.", "no type mismatch error. # Ignore any decoding error, so", "args: A string or a sequence of program arguments. The", "issues). 
while True: read_fds, _, _ = select.select([child_fd], [], [],", "def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False, logfile=None, env=None): \"\"\"Executes a subprocess", "This is intentionally not public. Args: process: The process in", "Call(args, cwd=cwd) def GetCmdOutput(args, cwd=None, shell=False, env=None): \"\"\"Open a subprocess", "later. Return value doesn't matter. if six.PY2: codecs.lookup('string-escape') def SingleQuote(s):", "True, captures stderr as part of stdout. Returns: The 3-tuple", "def read_process_stdout(): # TODO(jbudorick): Pick an appropriate read size here.", "the iteration may take precedence. Raises: TimeoutError: if timeout is", "process.stdout.fileno() fl = fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)", "characters such as '$'). The returned value can be used", "GetCmdOutput(args, cwd=None, shell=False, env=None): \"\"\"Open a subprocess to execute a", "of program arguments. The program to execute is the string", "True if args is a string and False if args", "and returns its return value. Args: args: A string or", "if child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size) if not", "if timeout else None try: while True: if end_time and", "value. Args: args: A string or a sequence of program", "string to quote. Return: The string quoted using double quotes.", "receive output from the command as it is running. env:", "code, output). Raises: TimeoutError on timeout. \"\"\" _ValidateAndLogCommand(args, cwd, shell)", "execute args as a shell command. Must be True if", "import os import pipes import select import signal import string", "a command using a variable to shrink it. Takes into", "return _IterCmdOutputLines( process, cmd, iter_timeout=iter_timeout, timeout=timeout, check_status=check_status) def _IterCmdOutputLines(process, cmd,", "cmd) # Construct lines to yield from raw data. 
buffer_output", "True: read_fds, _, _ = select.select([child_fd], [], [], iter_aware_poll_interval) if", "for each iteration, in seconds. timeout: Timeout for the entire", "import select import signal import string import subprocess import sys", "subprocess with a timeout. Args: args: List of arguments to", "_read_and_decode(child_fd, buffer_size) if data: yield data continue break break finally:", "(time.time() + timeout) if timeout else None try: while True:", "read_fds: data = _read_and_decode(child_fd, buffer_size) if not data: break yield", "None finally: try: if process.returncode is None: # Make sure", "adb logcat) return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell,", "does not complete. Yields: basestrings of data or None. \"\"\"", "caller will not crash due to # uncaught exception. Decoding", "iter_end_time: yield None iter_end_time = time.time() + iter_timeout if iter_end_time:", "and there will be no type mismatch error. # Ignore", "subprocess can find it # later. Return value doesn't matter.", "shell=False, env=None): \"\"\"Starts a subprocess and returns a handle to", "Returns: The 3-tuple (exit code, stdout, stderr). \"\"\" _ValidateAndLogCommand(args, cwd,", "os.path.join(os.path.dirname(__file__), '..', '..', '..')) SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six') if", "If True, captures stderr as part of stdout. Returns: The", "'<truncated>' if len(stdout) > 4096 else '') return (status, stdout)", "Yields: basestrings of data or None. 
\"\"\" def GetCmdStatusAndOutputWithTimeout(args, timeout,", "if SIX_PATH not in sys.path: sys.path.append(SIX_PATH) import six from devil", "( 'Iteration received no data despite no iter_timeout being set.", "If iter_timeout is set, the remaining length of time in", "% var_name).join(parts) return with_substitutions or \"''\" return ' '.join(shrink(part) for", "= lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL) if six.PY2: return subprocess.Popen( args=args, cwd=cwd,", "in the given time, this generator will yield None. timeout:", "characters (e.g. space or quote characters), while retaining some shell", "of bytes to read (and thus yield) at once. poll_interval:", "'..', '..', '..')) SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six') if SIX_PATH", "its return value. Args: args: A string or a sequence", "fails to do so, a TimeoutError will be raised. buffer_size:", "quotes. \"\"\" if not s: return '\"\"' elif all(c in", "# exception. process.kill() except OSError: pass process.wait() def _IterProcessStdoutQueue(process, iter_timeout=None,", "GetCmdStatusAndOutput(args, cwd, shell, env) return output def _ValidateAndLogCommand(args, cwd, shell):", "supported on windows. # pylint: disable=unexpected-keyword-arg if sys.platform == 'win32':", "import Queue as queue import threading stdout_queue = queue.Queue() def", "The process in question. iter_timeout: An optional length of time,", "SingleQuote(x) for x in value.split(var_value)) with_substitutions = ('\"$%s\"' % var_name).join(parts)", "%s%s', stdout[:4096].rstrip(), '<truncated>' if len(stdout) > 4096 else '') return", "other special characters such as '$'). The returned value can", "= os.read(fd, buffer_size) if data and six.PY3: data = data.decode('utf-8',", "do not know the encoding of the output, and in", "execute is the first element. iter_timeout: Timeout for each iteration,", "exception. 
process.kill() except OSError: pass process.wait() def _IterProcessStdoutQueue(process, iter_timeout=None, timeout=None,", "None) preexec_fn = None else: close_fds = True preexec_fn =", "2012 The Chromium Authors. All rights reserved. # Use of", "includes: '$', '`', '\\', '!', '*', and '@'. Args: s:", "depend on the shell implementation. This set usually includes: '$',", "CalledProcessError if check_status is True and the process exited with", "timeout. Args: args: List of arguments to the program, the", "# TODO(jbudorick): Pick an appropriate read size here. while True:", "read_process_stdout(): # TODO(jbudorick): Pick an appropriate read size here. while", "end_time: raise TimeoutError() try: s = stdout_queue.get(True, iter_timeout) if not", "timeout. \"\"\" _ValidateAndLogCommand(args, cwd, shell) output = six.StringIO() process =", "return self._output def _read_and_decode(fd, buffer_size): data = os.read(fd, buffer_size) if", "interpolation. The returned value can be used in a shell", "stderr = pipe.communicate() return (pipe.returncode, stdout, stderr) class TimeoutError(base_error.BaseError): \"\"\"Module-specific", "\"\"\"Executes a subprocess and returns its exit code and output.", "generator will yield None. timeout: An optional length of time,", "defines environment variables for the subprocess. Returns: The 2-tuple (exit", "args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env) pipe.communicate() return pipe.wait() def", "so that caller will not crash due to # uncaught", "from its output. Args: args: List of arguments to the", "4096 else '') return (status, stdout) def StartCmd(args, cwd=None, shell=False,", "iter_timeout continue else: assert data is not None, ( 'Iteration", "+ ' ' + (cwd or '')) return Call(args, cwd=cwd)", "entire command, in seconds. cwd: If not None, the subprocess's", "running. env: If not None, a mapping that defines environment", "quote a string which may contain unsafe characters (e.g. 
space", ") else: # opens stdout in text mode, so that", "read_fds, _, _ = select.select([child_fd], [], [], iter_aware_poll_interval) if child_fd", "to the process. Args: args: A string or a sequence", "_IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"An fcntl-based implementation of _IterProcessStdout.\"\"\"", "self._output def _read_and_decode(fd, buffer_size): data = os.read(fd, buffer_size) if data", "args, stdout=subprocess.PIPE, stderr=stderr, shell=shell, cwd=cwd, env=env) stdout, stderr = pipe.communicate()", "around if we fail with an # exception. process.kill() except", "calls to `select.select`. If iter_timeout is set, the remaining length", "env=None): \"\"\"Executes a subprocess with a timeout. Args: args: List", "The string to quote. Return: The string quoted using double", "found in the LICENSE file. \"\"\"A wrapper for subprocess to", "env=None, check_status=True): \"\"\"Executes a subprocess and continuously yields lines from", "the subprocess's current directory will be changed to |cwd| before", "return data def _IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"An fcntl-based", "lines from its output. Args: args: List of arguments to", "self._output = output @property def output(self): return self._output def _read_and_decode(fd,", "there will be no type mismatch error. # Ignore any", "Args: process: The process in question. iter_timeout: An optional length", "process.kill() except OSError: pass process.wait() def _IterProcessStdoutQueue(process, iter_timeout=None, timeout=None, buffer_size=4096,", "env=env) try: for data in _IterProcessStdout(process, timeout=timeout): if logfile: logfile.write(data)", "returns its output. 
Args: args: A string or a sequence", "_SafeShellChars for c in s): return s else: return '\"'", "pylint: disable=too-many-nested-blocks import fcntl try: # Enable non-blocking reads from", "current directory will be changed to |cwd| before it's executed.", "output is received in the given time, this generator will", "to ensure subprocess can find it # later. Return value", "logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(), '<truncated>' if len(stdout) > 4096 else '')", "using double quotes. Reliably quote a string which may contain", "return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env,", "output = six.StringIO() process = Popen( args, cwd=cwd, shell=shell, stdout=subprocess.PIPE,", "cwd logger.debug('[host]%s> %s', cwd, args) return args def GetCmdStatusAndOutput(args, cwd=None,", "GetCmdStatusOutputAndError( args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr) if stderr: logger.critical('STDERR: %s',", "stderr to logger (which defaults to stdout). \"\"\" (_, output)", "a sequence. logfile: Optional file-like object that will receive output", "be changed to |cwd| before it's executed. shell: Whether to", "+ timeout) if timeout else None try: while True: if", "command execution. \"\"\" logger.debug(str(args) + ' ' + (cwd or", "iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): \"\"\"A Queue.Queue-based implementation of _IterProcessStdout. TODO(jbudorick):", "(and thus yield) at once. poll_interval: The length of time", "frozenset(string.ascii_letters + string.digits + '@%_-+=:,./') # Cache the string-escape codec", "disable=unexpected-keyword-arg if sys.platform == 'win32': close_fds = (stdin is None", "Timeout for the entire command, in seconds. cwd: If not", "a shell command line as one token that gets to", "such as '$'). The returned value can be used in", "is running. 
env: If not None, a mapping that defines", "s else: return '\"' + s.replace('\"', '\\\\\"') + '\"' def", "code from the command execution. \"\"\" logger.debug(str(args) + ' '", "None, a mapping that defines environment variables for the subprocess.", "logging import os import pipes import select import signal import", "replacement for _IterProcessStdoutFcntl on all platforms. \"\"\" # pylint: disable=unused-argument", "None cur_iter_timeout = None if iter_timeout: iter_end = time.time() +", "shell=False, env=None, merge_stderr=False): \"\"\"Executes a subprocess and returns its exit", "command. Must be True if args is a string and", "check the exit status of the process after all output", "will be raised. buffer_size: The maximum number of bytes to", "set and the process does not complete. Yields: basestrings of", "= logging.getLogger(__name__) _SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./') #", "process, cmd, iter_timeout=iter_timeout, timeout=timeout, check_status=check_status) def _IterCmdOutputLines(process, cmd, iter_timeout=None, timeout=None,", "string which may contain unsafe characters (e.g. space or quote", "line in lines: yield line if iter_timeout: iter_end = time.time()", "iter_timeout if iter_end_time: iter_aware_poll_interval = min(poll_interval, max(0, iter_end_time - time.time()))", "iter_timeout) if iter_timeout else None while True: if end_time and", "changed to |cwd| before it's executed. Returns: Return code from", "be run with shell=True') else: if shell: raise Exception('array args", "== 'win32': close_fds = (stdin is None and stdout is" ]
[ "# Generated by Django 4.0.2 on 2022-02-26 15:52 from django.db", "models.BooleanField(default=True)), ], options={ 'verbose_name': 'Payment Selection', 'verbose_name_plural': 'Payment Selections', },", "initial = True dependencies = [ ] operations = [", "), migrations.CreateModel( name='PaymentSelections', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name',", "name='PaymentSelections', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='Required', max_length=255,", "] operations = [ migrations.CreateModel( name='FundingOptions', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True,", "verbose_name='funding timeframe')), ('funding_window', models.CharField(help_text='Required', max_length=255, verbose_name='funding window')), ], options={ 'verbose_name':", "primary_key=True, serialize=False, verbose_name='ID')), ('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2, help_text='Required',", "options={ 'verbose_name': 'Funding Option', 'verbose_name_plural': 'Funding Options', }, ), migrations.CreateModel(", "verbose_name='name')), ('is_active', models.BooleanField(default=True)), ], options={ 'verbose_name': 'Payment Selection', 'verbose_name_plural': 'Payment", "Generated by Django 4.0.2 on 2022-02-26 15:52 from django.db import", "timeframe')), ('funding_window', models.CharField(help_text='Required', max_length=255, verbose_name='funding window')), ], options={ 'verbose_name': 'Funding", "[ migrations.CreateModel( name='FundingOptions', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('funding_name',", "[ ] operations = [ migrations.CreateModel( name='FundingOptions', fields=[ ('id', 
models.BigAutoField(auto_created=True,", "], options={ 'verbose_name': 'Funding Option', 'verbose_name_plural': 'Funding Options', }, ),", "max_length=255, verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding price')), ('funding_timeframe', models.CharField(help_text='Required',", "models.CharField(help_text='Required', max_length=255, verbose_name='funding window')), ], options={ 'verbose_name': 'Funding Option', 'verbose_name_plural':", "migrations, models class Migration(migrations.Migration): initial = True dependencies = [", "name='FundingOptions', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('funding_name', models.CharField(help_text='Required', max_length=255,", "fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')),", "on 2022-02-26 15:52 from django.db import migrations, models class Migration(migrations.Migration):", "verbose_name='funding price')), ('funding_timeframe', models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')), ('funding_window', models.CharField(help_text='Required', max_length=255,", "serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='Required', max_length=255, verbose_name='name')), ('is_active', models.BooleanField(default=True)), ], options={", "price')), ('funding_timeframe', models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')), ('funding_window', models.CharField(help_text='Required', max_length=255, verbose_name='funding", "('funding_timeframe', models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')), ('funding_window', models.CharField(help_text='Required', max_length=255, 
verbose_name='funding window')),", "max_length=255, verbose_name='name')), ('is_active', models.BooleanField(default=True)), ], options={ 'verbose_name': 'Payment Selection', 'verbose_name_plural':", "help_text='Required', max_digits=1000, verbose_name='funding price')), ('funding_timeframe', models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')), ('funding_window',", "dependencies = [ ] operations = [ migrations.CreateModel( name='FundingOptions', fields=[", "django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies", "<filename>services/server/server/apps/checkout/migrations/0001_initial.py # Generated by Django 4.0.2 on 2022-02-26 15:52 from", "'verbose_name_plural': 'Funding Options', }, ), migrations.CreateModel( name='PaymentSelections', fields=[ ('id', models.BigAutoField(auto_created=True,", "operations = [ migrations.CreateModel( name='FundingOptions', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False,", "verbose_name='funding window')), ], options={ 'verbose_name': 'Funding Option', 'verbose_name_plural': 'Funding Options',", "('name', models.CharField(help_text='Required', max_length=255, verbose_name='name')), ('is_active', models.BooleanField(default=True)), ], options={ 'verbose_name': 'Payment", "('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')), ('funding_price',", "('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding price')),", "import migrations, models class Migration(migrations.Migration): initial = True dependencies =", "models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')), 
('funding_window', models.CharField(help_text='Required', max_length=255, verbose_name='funding window')), ],", "('is_active', models.BooleanField(default=True)), ], options={ 'verbose_name': 'Payment Selection', 'verbose_name_plural': 'Payment Selections',", "by Django 4.0.2 on 2022-02-26 15:52 from django.db import migrations,", "'Funding Options', }, ), migrations.CreateModel( name='PaymentSelections', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True,", "True dependencies = [ ] operations = [ migrations.CreateModel( name='FundingOptions',", "verbose_name='ID')), ('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding", "'verbose_name': 'Funding Option', 'verbose_name_plural': 'Funding Options', }, ), migrations.CreateModel( name='PaymentSelections',", "}, ), migrations.CreateModel( name='PaymentSelections', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "class Migration(migrations.Migration): initial = True dependencies = [ ] operations", "models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding price')), ('funding_timeframe',", "2022-02-26 15:52 from django.db import migrations, models class Migration(migrations.Migration): initial", "= [ migrations.CreateModel( name='FundingOptions', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "= [ ] operations = [ migrations.CreateModel( name='FundingOptions', fields=[ ('id',", "('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding price')), ('funding_timeframe', models.CharField(help_text='Required', 
max_length=255, verbose_name='funding", "15:52 from django.db import migrations, models class Migration(migrations.Migration): initial =", "models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding price')), ('funding_timeframe', models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')),", "window')), ], options={ 'verbose_name': 'Funding Option', 'verbose_name_plural': 'Funding Options', },", "Option', 'verbose_name_plural': 'Funding Options', }, ), migrations.CreateModel( name='PaymentSelections', fields=[ ('id',", "models class Migration(migrations.Migration): initial = True dependencies = [ ]", "max_length=255, verbose_name='funding window')), ], options={ 'verbose_name': 'Funding Option', 'verbose_name_plural': 'Funding", "Options', }, ), migrations.CreateModel( name='PaymentSelections', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False,", "4.0.2 on 2022-02-26 15:52 from django.db import migrations, models class", "Django 4.0.2 on 2022-02-26 15:52 from django.db import migrations, models", "Migration(migrations.Migration): initial = True dependencies = [ ] operations =", "verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding price')), ('funding_timeframe', models.CharField(help_text='Required', max_length=255,", "('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='Required', max_length=255, verbose_name='name')), ('is_active',", "models.CharField(help_text='Required', max_length=255, verbose_name='name')), ('is_active', models.BooleanField(default=True)), ], options={ 'verbose_name': 'Payment Selection',", "models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='Required', max_length=255, 
verbose_name='name')), ('is_active', models.BooleanField(default=True)),", "serialize=False, verbose_name='ID')), ('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000,", "max_length=255, verbose_name='funding timeframe')), ('funding_window', models.CharField(help_text='Required', max_length=255, verbose_name='funding window')), ], options={", "('funding_window', models.CharField(help_text='Required', max_length=255, verbose_name='funding window')), ], options={ 'verbose_name': 'Funding Option',", "primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='Required', max_length=255, verbose_name='name')), ('is_active', models.BooleanField(default=True)), ],", "migrations.CreateModel( name='FundingOptions', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('funding_name', models.CharField(help_text='Required',", "options={ 'verbose_name': 'Payment Selection', 'verbose_name_plural': 'Payment Selections', }, ), ]", "models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2,", "migrations.CreateModel( name='PaymentSelections', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='Required',", "'Funding Option', 'verbose_name_plural': 'Funding Options', }, ), migrations.CreateModel( name='PaymentSelections', fields=[", "], options={ 'verbose_name': 'Payment Selection', 'verbose_name_plural': 'Payment Selections', }, ),", "from django.db import migrations, models class Migration(migrations.Migration): initial = True", "= True dependencies = [ ] 
operations = [ migrations.CreateModel(", "verbose_name='ID')), ('name', models.CharField(help_text='Required', max_length=255, verbose_name='name')), ('is_active', models.BooleanField(default=True)), ], options={ 'verbose_name':", "fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='Required', max_length=255, verbose_name='name')),", "max_digits=1000, verbose_name='funding price')), ('funding_timeframe', models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')), ('funding_window', models.CharField(help_text='Required'," ]
[ "get_iss_astm(issledovaniya: list, app: api.Application, need_astm=False): m = [get_astm_header(), get_patient()] n", "if not rel.exists(): continue rel = rel[0] if rel.is_code: researches[k].append([None,", "= directions.Issledovaniya.objects.filter(napravleniye=direction) if not full: iss_list = iss_list.filter(doc_confirmation__isnull=True) for i", "out.write(json.dumps(m)) return encode(m) def get_iss_astm(issledovaniya: list, app: api.Application, need_astm=False): m", "-> list: r = [] n = 0 iss_list =", "= directions.TubesRegistration.objects.filter(type__fractions=fraction) if not tube.exists(): continue tube = tube[0] researches[tube.pk].append(rel.astm_field)", "if not rel.exists(): continue rel = rel[0] tube = directions.TubesRegistration.objects.filter(type__fractions=fraction)", "*iss)) m.append(get_leave()) if out: out.write(json.dumps(m)) return encode(m) def get_iss_astm(issledovaniya: list,", "hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if not rel.exists(): continue rel", "continue rel = rel[0] if rel.is_code: researches[k].append([None, None, None, rel.astm_field])", "list: return ['H|\\\\^&', None, None, ['1', '2.00'], None, None, None,", "relationfractionastm__analyzer=analyzer, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if not rel.exists(): continue", "in directions_list] m = [get_astm_header(), get_patient()] m = list(itertools.chain(m, *iss))", "rel = rel[0] if rel.is_code: researches[k].append([None, None, None, rel.astm_field]) else:", "None, None, ['1', '2.00'], None, None, None, None, None, None,", "rel[0] tube = directions.TubesRegistration.objects.filter(type__fractions=fraction) if not tube.exists(): continue tube =", "'N'] def get_patient() -> list: return ['P', 1] def get_iss_direction(direction:", "rel.is_code: researches[k].append([None, None, None, rel.astm_field]) else: researches[k].append([None, rel.astm_field, None, 
None])", "import itertools from astm import codec from collections import defaultdict", "json def get_astm_header() -> list: return ['H|\\\\^&', None, None, ['1',", "in researches[tpk]]]) return r def encode(m) -> str: return codec.iter_encode(m)", "iss_list.filter(doc_confirmation__isnull=True) for i in iss_list: researches = defaultdict(list) for fraction", "timezone import directions.models as directions import directory.models as directory import", "None, None, None, None, None, None, 'P', '1.00', timezone.now().strftime(\"%Y%m%d%H%M%S\")] def", "in iss_list: researches = defaultdict(list) for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer,", "iss = [get_iss_direction(x, analyzer, full) for x in directions_list] m", "iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction) if not full: iss_list = iss_list.filter(doc_confirmation__isnull=True) for", "[get_astm_header(), get_patient()] m = list(itertools.chain(m, *iss)) m.append(get_leave()) if out: out.write(json.dumps(m))", "None]) for tpk in researches: n += 1 m.append(['O', n,", "directory.models as directory import api.models as api import simplejson as", "return encode(m) def get_iss_astm(issledovaniya: list, app: api.Application, need_astm=False): m =", "as directory import api.models as api import simplejson as json", "None, None, None, None, None, 'P', '1.00', timezone.now().strftime(\"%Y%m%d%H%M%S\")] def get_leave()", "defaultdict from django.utils import timezone import directions.models as directions import", "astm import codec from collections import defaultdict from django.utils import", "api import simplejson as json def get_astm_header() -> list: return", "for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app)", "list: return ['L', 1, 'N'] def get_patient() -> 
list: return", "api.Analyzer, full=False, out=None) -> str: iss = [get_iss_direction(x, analyzer, full)", "['P', 1] def get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer, full=False) -> list:", "n = 0 researches = defaultdict(list) for row in issledovaniya:", "None, None]) for tpk in researches: n += 1 m.append(['O',", "researches: n += 1 r.append(['O', n, tpk, None, [[None, x,", "n, tpk, None, [[None, x, None, None] for x in", "fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if", "codec.iter_encode(m) def get_astm(directions_list, analyzer: api.Analyzer, full=False, out=None) -> str: iss", "get_patient()] m = list(itertools.chain(m, *iss)) m.append(get_leave()) if out: out.write(json.dumps(m)) return", "1 r.append(['O', n, tpk, None, [[None, x, None, None] for", "rel.exists(): continue rel = rel[0] if rel.is_code: researches[k].append([None, None, None,", "api.Analyzer, full=False) -> list: r = [] n = 0", "str: iss = [get_iss_direction(x, analyzer, full) for x in directions_list]", "researches = defaultdict(list) for row in issledovaniya: k = row[\"pk\"]", "else: researches[k].append([None, rel.astm_field, None, None]) for tpk in researches: n", "rel[0] if rel.is_code: researches[k].append([None, None, None, rel.astm_field]) else: researches[k].append([None, rel.astm_field,", "['L', 1, 'N'] def get_patient() -> list: return ['P', 1]", "analyzer: api.Analyzer, full=False, out=None) -> str: iss = [get_iss_direction(x, analyzer,", "tube = tube[0] researches[tube.pk].append(rel.astm_field) for tpk in researches: n +=", "= defaultdict(list) for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False): rel =", "= api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if not rel.exists(): continue 
rel = rel[0]", "relationfractionastm__application_api=app, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if not rel.exists(): continue", "m = [get_astm_header(), get_patient()] m = list(itertools.chain(m, *iss)) m.append(get_leave()) if", "issledovaniya: k = row[\"pk\"] i = row[\"iss\"] for fraction in", "+= 1 m.append(['O', n, tpk, None, researches[tpk]]) m.append(get_leave()) return encode(m)", "collections import defaultdict from django.utils import timezone import directions.models as", "defaultdict(list) for row in issledovaniya: k = row[\"pk\"] i =", "m = list(itertools.chain(m, *iss)) m.append(get_leave()) if out: out.write(json.dumps(m)) return encode(m)", "k = row[\"pk\"] i = row[\"iss\"] for fraction in directory.Fractions.objects.filter(research=i.research,", "tube.exists(): continue tube = tube[0] researches[tube.pk].append(rel.astm_field) for tpk in researches:", "get_leave() -> list: return ['L', 1, 'N'] def get_patient() ->", "import codec from collections import defaultdict from django.utils import timezone", "list: r = [] n = 0 iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction)", "def get_patient() -> list: return ['P', 1] def get_iss_direction(direction: directions.Napravleniya,", "get_patient() -> list: return ['P', 1] def get_iss_direction(direction: directions.Napravleniya, analyzer:", "= [] n = 0 iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction) if not", "= [get_iss_direction(x, analyzer, full) for x in directions_list] m =", "= list(itertools.chain(m, *iss)) m.append(get_leave()) if out: out.write(json.dumps(m)) return encode(m) def", "list, app: api.Application, need_astm=False): m = [get_astm_header(), get_patient()] n =", "0 researches = defaultdict(list) for row in issledovaniya: k =", "row[\"iss\"] for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False): rel = 
api.RelationFractionASTM.objects.filter(fraction=fraction,", "return ['H|\\\\^&', None, None, ['1', '2.00'], None, None, None, None,", "if out: out.write(json.dumps(m)) return encode(m) def get_iss_astm(issledovaniya: list, app: api.Application,", "not rel.exists(): continue rel = rel[0] tube = directions.TubesRegistration.objects.filter(type__fractions=fraction) if", "return ['P', 1] def get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer, full=False) ->", "-> str: iss = [get_iss_direction(x, analyzer, full) for x in", "def encode(m) -> str: return codec.iter_encode(m) def get_astm(directions_list, analyzer: api.Analyzer,", "def get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer, full=False) -> list: r =", "simplejson as json def get_astm_header() -> list: return ['H|\\\\^&', None,", "directions.Issledovaniya.objects.filter(napravleniye=direction) if not full: iss_list = iss_list.filter(doc_confirmation__isnull=True) for i in", "x in directions_list] m = [get_astm_header(), get_patient()] m = list(itertools.chain(m,", "-> list: return ['P', 1] def get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer,", "researches = defaultdict(list) for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False): rel", "[[None, x, None, None] for x in researches[tpk]]]) return r", "import defaultdict from django.utils import timezone import directions.models as directions", "researches[tpk]]]) return r def encode(m) -> str: return codec.iter_encode(m) def", "'1.00', timezone.now().strftime(\"%Y%m%d%H%M%S\")] def get_leave() -> list: return ['L', 1, 'N']", "= 0 iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction) if not full: iss_list =", "r def encode(m) -> str: return codec.iter_encode(m) def get_astm(directions_list, analyzer:", "directory.Fractions.objects.filter(research=i.research, 
relationfractionastm__application_api=app, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if not rel.exists():", "as json def get_astm_header() -> list: return ['H|\\\\^&', None, None,", "directions.Napravleniya, analyzer: api.Analyzer, full=False) -> list: r = [] n", "tpk in researches: n += 1 r.append(['O', n, tpk, None,", "def get_leave() -> list: return ['L', 1, 'N'] def get_patient()", "+= 1 r.append(['O', n, tpk, None, [[None, x, None, None]", "from astm import codec from collections import defaultdict from django.utils", "if rel.is_code: researches[k].append([None, None, None, rel.astm_field]) else: researches[k].append([None, rel.astm_field, None,", "def get_astm(directions_list, analyzer: api.Analyzer, full=False, out=None) -> str: iss =", "i = row[\"iss\"] for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False): rel", "directions import directory.models as directory import api.models as api import", "for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer)", "None, [[None, x, None, None] for x in researches[tpk]]]) return", "m.append(get_leave()) if out: out.write(json.dumps(m)) return encode(m) def get_iss_astm(issledovaniya: list, app:", "directions.models as directions import directory.models as directory import api.models as", "not rel.exists(): continue rel = rel[0] if rel.is_code: researches[k].append([None, None,", "x, None, None] for x in researches[tpk]]]) return r def", "for x in directions_list] m = [get_astm_header(), get_patient()] m =", "None, 'P', '1.00', timezone.now().strftime(\"%Y%m%d%H%M%S\")] def get_leave() -> list: return ['L',", "list(itertools.chain(m, *iss)) m.append(get_leave()) if out: out.write(json.dumps(m)) return encode(m) def 
get_iss_astm(issledovaniya:", "tube[0] researches[tube.pk].append(rel.astm_field) for tpk in researches: n += 1 r.append(['O',", "tube = directions.TubesRegistration.objects.filter(type__fractions=fraction) if not tube.exists(): continue tube = tube[0]", "1, 'N'] def get_patient() -> list: return ['P', 1] def", "m = [get_astm_header(), get_patient()] n = 0 researches = defaultdict(list)", "rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if not rel.exists(): continue rel =", "api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if not rel.exists(): continue rel = rel[0] tube", "def get_astm_header() -> list: return ['H|\\\\^&', None, None, ['1', '2.00'],", "as api import simplejson as json def get_astm_header() -> list:", "[] n = 0 iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction) if not full:", "return codec.iter_encode(m) def get_astm(directions_list, analyzer: api.Analyzer, full=False, out=None) -> str:", "need_astm=False): m = [get_astm_header(), get_patient()] n = 0 researches =", "api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if not rel.exists(): continue rel = rel[0] if", "get_astm(directions_list, analyzer: api.Analyzer, full=False, out=None) -> str: iss = [get_iss_direction(x,", "itertools from astm import codec from collections import defaultdict from", "return r def encode(m) -> str: return codec.iter_encode(m) def get_astm(directions_list,", "def get_iss_astm(issledovaniya: list, app: api.Application, need_astm=False): m = [get_astm_header(), get_patient()]", "rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if not rel.exists(): continue rel =", "app: api.Application, need_astm=False): m = [get_astm_header(), get_patient()] n = 0", "from django.utils import timezone import directions.models as directions import directory.models", "None, None, None, None, 'P', '1.00', 
timezone.now().strftime(\"%Y%m%d%H%M%S\")] def get_leave() ->", "= iss_list.filter(doc_confirmation__isnull=True) for i in iss_list: researches = defaultdict(list) for", "rel = rel[0] tube = directions.TubesRegistration.objects.filter(type__fractions=fraction) if not tube.exists(): continue", "tpk, None, [[None, x, None, None] for x in researches[tpk]]])", "None, None] for x in researches[tpk]]]) return r def encode(m)", "full=False, out=None) -> str: iss = [get_iss_direction(x, analyzer, full) for", "= [get_astm_header(), get_patient()] n = 0 researches = defaultdict(list) for", "in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if not", "n += 1 r.append(['O', n, tpk, None, [[None, x, None,", "= 0 researches = defaultdict(list) for row in issledovaniya: k", "[get_astm_header(), get_patient()] n = 0 researches = defaultdict(list) for row", "not tube.exists(): continue tube = tube[0] researches[tube.pk].append(rel.astm_field) for tpk in", "rel.exists(): continue rel = rel[0] tube = directions.TubesRegistration.objects.filter(type__fractions=fraction) if not", "r.append(['O', n, tpk, None, [[None, x, None, None] for x", "import directory.models as directory import api.models as api import simplejson", "for tpk in researches: n += 1 r.append(['O', n, tpk,", "None, rel.astm_field]) else: researches[k].append([None, rel.astm_field, None, None]) for tpk in", "n += 1 m.append(['O', n, tpk, None, researches[tpk]]) m.append(get_leave()) return", "continue tube = tube[0] researches[tube.pk].append(rel.astm_field) for tpk in researches: n", "application_api=app) if not rel.exists(): continue rel = rel[0] if rel.is_code:", "import api.models as api import simplejson as json def get_astm_header()", "import directions.models as directions import directory.models as directory import api.models", "in researches: n += 1 
r.append(['O', n, tpk, None, [[None,", "full) for x in directions_list] m = [get_astm_header(), get_patient()] m", "i in iss_list: researches = defaultdict(list) for fraction in directory.Fractions.objects.filter(research=i.research,", "return ['L', 1, 'N'] def get_patient() -> list: return ['P',", "-> list: return ['H|\\\\^&', None, None, ['1', '2.00'], None, None,", "tpk in researches: n += 1 m.append(['O', n, tpk, None,", "None] for x in researches[tpk]]]) return r def encode(m) ->", "['1', '2.00'], None, None, None, None, None, None, 'P', '1.00',", "timezone.now().strftime(\"%Y%m%d%H%M%S\")] def get_leave() -> list: return ['L', 1, 'N'] def", "r = [] n = 0 iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction) if", "x in researches[tpk]]]) return r def encode(m) -> str: return", "n = 0 iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction) if not full: iss_list", "str: return codec.iter_encode(m) def get_astm(directions_list, analyzer: api.Analyzer, full=False, out=None) ->", "out=None) -> str: iss = [get_iss_direction(x, analyzer, full) for x", "= [get_astm_header(), get_patient()] m = list(itertools.chain(m, *iss)) m.append(get_leave()) if out:", "analyzer=analyzer) if not rel.exists(): continue rel = rel[0] tube =", "iss_list: researches = defaultdict(list) for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False):", "get_patient()] n = 0 researches = defaultdict(list) for row in", "rel.astm_field, None, None]) for tpk in researches: n += 1", "= defaultdict(list) for row in issledovaniya: k = row[\"pk\"] i", "1] def get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer, full=False) -> list: r", "row in issledovaniya: k = row[\"pk\"] i = row[\"iss\"] for", "directions_list] m = [get_astm_header(), get_patient()] m = list(itertools.chain(m, *iss)) m.append(get_leave())", "in directory.Fractions.objects.filter(research=i.research, 
relationfractionastm__analyzer=analyzer, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if not", "rel.astm_field]) else: researches[k].append([None, rel.astm_field, None, None]) for tpk in researches:", "for tpk in researches: n += 1 m.append(['O', n, tpk,", "'P', '1.00', timezone.now().strftime(\"%Y%m%d%H%M%S\")] def get_leave() -> list: return ['L', 1,", "api.Application, need_astm=False): m = [get_astm_header(), get_patient()] n = 0 researches", "in issledovaniya: k = row[\"pk\"] i = row[\"iss\"] for fraction", "researches[k].append([None, rel.astm_field, None, None]) for tpk in researches: n +=", "as directions import directory.models as directory import api.models as api", "django.utils import timezone import directions.models as directions import directory.models as", "if not tube.exists(): continue tube = tube[0] researches[tube.pk].append(rel.astm_field) for tpk", "import timezone import directions.models as directions import directory.models as directory", "for row in issledovaniya: k = row[\"pk\"] i = row[\"iss\"]", "api.models as api import simplejson as json def get_astm_header() ->", "in researches: n += 1 m.append(['O', n, tpk, None, researches[tpk]])", "directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if not rel.exists():", "<filename>api/to_astm.py import itertools from astm import codec from collections import", "if not full: iss_list = iss_list.filter(doc_confirmation__isnull=True) for i in iss_list:", "-> list: return ['L', 1, 'N'] def get_patient() -> list:", "codec from collections import defaultdict from django.utils import timezone import", "not full: iss_list = iss_list.filter(doc_confirmation__isnull=True) for i in iss_list: researches", "for i in iss_list: researches = defaultdict(list) for fraction in", "for x in researches[tpk]]]) return r def 
encode(m) -> str:", "researches[k].append([None, None, None, rel.astm_field]) else: researches[k].append([None, rel.astm_field, None, None]) for", "= api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if not rel.exists(): continue rel = rel[0]", "encode(m) def get_iss_astm(issledovaniya: list, app: api.Application, need_astm=False): m = [get_astm_header(),", "get_astm_header() -> list: return ['H|\\\\^&', None, None, ['1', '2.00'], None,", "full=False) -> list: r = [] n = 0 iss_list", "['H|\\\\^&', None, None, ['1', '2.00'], None, None, None, None, None,", "= rel[0] tube = directions.TubesRegistration.objects.filter(type__fractions=fraction) if not tube.exists(): continue tube", "directory import api.models as api import simplejson as json def", "full: iss_list = iss_list.filter(doc_confirmation__isnull=True) for i in iss_list: researches =", "None, ['1', '2.00'], None, None, None, None, None, None, 'P',", "hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if not rel.exists(): continue rel", "analyzer, full) for x in directions_list] m = [get_astm_header(), get_patient()]", "= row[\"pk\"] i = row[\"iss\"] for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app,", "0 iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction) if not full: iss_list = iss_list.filter(doc_confirmation__isnull=True)", "fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if", "'2.00'], None, None, None, None, None, None, 'P', '1.00', timezone.now().strftime(\"%Y%m%d%H%M%S\")]", "None, None, rel.astm_field]) else: researches[k].append([None, rel.astm_field, None, None]) for tpk", "None, None, None, 'P', '1.00', timezone.now().strftime(\"%Y%m%d%H%M%S\")] def get_leave() -> list:", "analyzer: 
api.Analyzer, full=False) -> list: r = [] n =", "= rel[0] if rel.is_code: researches[k].append([None, None, None, rel.astm_field]) else: researches[k].append([None,", "directions.TubesRegistration.objects.filter(type__fractions=fraction) if not tube.exists(): continue tube = tube[0] researches[tube.pk].append(rel.astm_field) for", "-> str: return codec.iter_encode(m) def get_astm(directions_list, analyzer: api.Analyzer, full=False, out=None)", "get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer, full=False) -> list: r = []", "= tube[0] researches[tube.pk].append(rel.astm_field) for tpk in researches: n += 1", "defaultdict(list) for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction,", "row[\"pk\"] i = row[\"iss\"] for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False):", "from collections import defaultdict from django.utils import timezone import directions.models", "[get_iss_direction(x, analyzer, full) for x in directions_list] m = [get_astm_header(),", "researches[tube.pk].append(rel.astm_field) for tpk in researches: n += 1 r.append(['O', n,", "out: out.write(json.dumps(m)) return encode(m) def get_iss_astm(issledovaniya: list, app: api.Application, need_astm=False):", "list: return ['P', 1] def get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer, full=False)", "= row[\"iss\"] for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False): rel =", "iss_list = iss_list.filter(doc_confirmation__isnull=True) for i in iss_list: researches = defaultdict(list)", "researches: n += 1 m.append(['O', n, tpk, None, researches[tpk]]) m.append(get_leave())", "continue rel = rel[0] tube = directions.TubesRegistration.objects.filter(type__fractions=fraction) 
if not tube.exists():", "None, None, 'P', '1.00', timezone.now().strftime(\"%Y%m%d%H%M%S\")] def get_leave() -> list: return", "encode(m) -> str: return codec.iter_encode(m) def get_astm(directions_list, analyzer: api.Analyzer, full=False,", "import simplejson as json def get_astm_header() -> list: return ['H|\\\\^&'," ]
[ "''' def test_simple_rom(self): rom_in = ROM1 som = parse_rom_image(rom_in) rom_out", "sdbc.create_device_record(name = \"device 2\", size = 0x100) m1 = sdbc.create_device_record(name", "= generate_rom_image(som) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out) ''' def test_full_dionysus_read(self):", "#print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): sm = som.SOM()", "\"memory 2\", size = 0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000) sm.insert_component(peripheral, d1)", "(0x0100000000) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom", "16 == 0): magic = \"0x%s\" % (rom_in[i].lower()) last_val =", "\"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "\"10000000\\n\"\\ \"00000000\\n\"\\ \"10000008\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F67\\n\"\\ \"70696F00\\n\"\\", "#print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out) def test_full_bus_with_integration(self): sm = som.SOM() sm.initialize_root() root", "\"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\", "self.assertEqual(rom_in, rom_out) ''' def test_full_dionysus_read(self): from nysa.host.platform_scanner import PlatformScanner pscanner", "\"77625F73\\n\"\\ \"6472616D\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "1\", size = 0x100) d2 = sdbc.create_device_record(name = \"device 2\",", "pscanner = PlatformScanner() platform_dict 
= pscanner.get_platforms() platform_names = platform_dict.keys() if", "\"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "2] and rom_in[i + 3] == rom_out[i + 3]: print", "ROM2 = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"03000000\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "rom_out = rom_out.splitlines() for i in range (0, len(rom_in), 4):", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\", "sm.insert_component(root, url) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory,", "sdbc.create_synthesis_record(\"Synthesis Name\", 123, \"cool tool\", 1.0, \"jeff\") sm.insert_component(root, url) sm.insert_component(root,", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\", "(0, len(rom), 4): if (i % 16 == 0): magic", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\", "generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, 
rom_out) def", "s.Verbose(\"Read SDB\") dionysus.read_sdb() def test_full_bus(self): sm = som.SOM() sm.initialize_root() root", "\"00000000\\n\"\\ \"00000207\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000003\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "\"Empty\" else: print \"???\" if rom_in[i] == rom_out[i] and rom_in[i", "elif last_val == 0xFF: print \"Empty\" else: print \"???\" if", "\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000340\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"53444200\\n\"\\", "15], 16) & 0xFF print \"\" if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000001\\n\"\\", "(i % 16 == 0): magic = \"0x%s\" % (rom_in[i].lower())", "rom = rom.splitlines() print \"ROM\" for i in range (0,", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\", "= parse_rom_image(rom_in) rom_out = generate_rom_image(som) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out)", "#print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)", "\"70696F00\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ 
\"00000000\\n\"\\ \"00000000\\n\"\\", "= \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "StatusLevel from nysa.common.status import Status class Test (unittest.TestCase): \"\"\"Unit test", "print \"Device\" elif last_val == 0x02: print \"Bridge\" elif last_val", "= sm.insert_bus() peripheral.set_name(\"peripheral\") memory = sm.insert_bus() memory.set_name(\"memory\") d1 = sdbc.create_device_record(name", "vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral,", "== 0): print \"Interconnect\" elif last_val == 0x01: print \"Device\"", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\", "peripheral.set_name(\"peripheral\") memory = sm.insert_bus() memory.set_name(\"memory\") d1 = sdbc.create_device_record(name = \"device", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\", "= generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out) def test_full_bus_with_integration(self):", "rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in)", "generate_rom_image from nysa.cbuilder.sdb import SDBInfo from nysa.cbuilder.sdb import SDBWarning from", "rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm = parse_rom_image(rom_in)", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F67\\n\"\\ \"70696F00\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", 
"\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000340\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\", "0x100) m1 = sdbc.create_device_record(name = \"memory 1\", size = 0x10000)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "= 0x100) m1 = sdbc.create_device_record(name = \"memory 1\", size =", "rom_in[i] == rom_out[i] and rom_in[i + 1] == rom_out[i +", "sdbc.convert_rom_to_32bit_buffer(rom) rom = rom.splitlines() print \"ROM\" for i in range", "#rom = sdbc.convert_rom_to_32bit_buffer(rom) rom = rom.splitlines() print \"ROM\" for i", "= \"memory 2\", size = 0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000) sm.insert_component(peripheral,", "unittest import json import sys import os import string sys.path.append(os.path.join(os.path.dirname(__file__),", "size = 0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2)", "ROMD = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\", "%s %s\" % (rom_in[i], rom_in[i + 1], rom_in[i + 2],", "\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "== rom_out[i] and rom_in[i + 1] == rom_out[i + 1]", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ 
\"00000000\\n\"\\", "= pscanner.get_platforms() platform_names = platform_dict.keys() if \"dionysus\" not in platform_names:", "2], rom_in[i + 3]) else: print \"%s %s : %s", "for i in range (0, len(rom), 4): if (i %", "sys import os import string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) from nysa.cbuilder", "\"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\", "2], rom_in[i + 3], rom_out[i], rom_out[i + 1], rom_out[i +", "= 0x800BEAF15DEADC03, device_id = 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") synthesis =", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\", "\"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\", "rom_out[i + 1] and rom_in[i + 2] == rom_out[i +", "as som from nysa.cbuilder.som_rom_parser import parse_rom_image from nysa.cbuilder.som_rom_generator import generate_rom_image", "\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\", "SDBInfo from nysa.cbuilder.sdb import SDBWarning from nysa.cbuilder.sdb import SDBError from", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ 
\"00000000\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\", "nysa.common.status import StatusLevel from nysa.common.status import Status class Test (unittest.TestCase):", "\"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "print \"Length of rom is not equal!\" return rom_in =", "= \"0x%s\" % (rom_in[i].lower()) last_val = int(rom_in[i + 15], 16)", "self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): rom_in = ROMD #print_sdb(rom) sm =", "m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in", "in range (0, len(rom), 4): if (i % 16 ==", "import unittest import json import sys import os import string", "import SDBWarning from nysa.cbuilder.sdb import SDBError from nysa.common.status import StatusLevel", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\"", "2\", size = 0x20000) intr = sdbc.create_integration_record(\"Integration Data\", vendor_id =", "Status class Test (unittest.TestCase): \"\"\"Unit test SDB Tree\"\"\" def setUp(self):", "+ 2] and rom_in[i + 3] == rom_out[i + 3]:", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\", "sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom =", "\"00010000\\n\"\\ \"00000000\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792032\\n\"\\", "d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = 
generate_rom_image(sm)", "d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in =", "#print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): rom_in = ROMD", "\"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000340\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\", "sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in", "\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\", "& 0xFF print \"\" if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val", "= \"memory 2\", size = 0x20000) intr = sdbc.create_integration_record(\"Integration Data\",", "intr = sdbc.create_integration_record(\"Integration Data\", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000)", "\"00000000\\n\"\\ \"00000040\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "= platform_instance.scan() if len(platforms) == 0: return dionysus = platforms[platforms.keys()[0]]", "= sdbc.create_device_record(name = \"memory 2\", size = 0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing", "len(rom_in), 4): if (i % 16 == 0): magic =", "import StatusLevel from nysa.common.status import Status class Test 
(unittest.TestCase): \"\"\"Unit", "\"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out)", "rom_out) def compare_roms(rom_in, rom_out): if len(rom_in) != len(rom_out): print \"Length", "def test_full_dionysus_read(self): from nysa.host.platform_scanner import PlatformScanner pscanner = PlatformScanner() platform_dict", "test_generate_one_sub_bus_with_url(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral =", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000003\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\", "\"00010100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1)", "sdb_object_model as som from nysa.cbuilder.som_rom_parser import parse_rom_image from nysa.cbuilder.som_rom_generator import", "elif last_val == 0x01: print \"Device\" elif last_val == 0x02:", "\"Found Dionysus\" s.set_level(\"fatal\") s.Verbose(\"Read SDB\") dionysus.read_sdb() def test_full_bus(self): sm =", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "#print_sdb_rom(rom_in) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ 
\"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "test_full_bus(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral =", "vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") synthesis", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792031\\n\"\\", ": %s %s != %s %s : %s %s\" %", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F73\\n\"\\ \"6472616D\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROMD = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\", "\"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "i in range (0, len(rom_in), 4): if (i % 16", "rom[i + 1], rom[i + 2], rom[i + 3]) ROM1", "= parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in,", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\", "sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) 
self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): sm =", "4): if (i % 16 == 0): magic = \"0x%s\"", "print \"%s %s : %s %s != %s %s :", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\", "% (rom[i].lower()) last_val = int(rom[i + 15], 16) & 0xFF", "\"140F0106\\n\"\\ \"53444200\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000101\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"10000000\\n\"\\", "\"00000000\\n\"\\ \"00010000\\n\"\\ \"00000000\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\", "\"%s %s : %s %s != %s %s : %s", "def test_generate_one_sub_bus_with_url(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral", "\"00000000\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792032\\n\"\\ \"00000000\\n\"\\", "\"140F0107\\n\"\\ \"77625F67\\n\"\\ \"70696F00\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "import sdb_component as sdbc from nysa.cbuilder import sdb_object_model as som", "\"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "device_id = 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") sm.insert_component(root, url) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral,", "\"00010000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ 
\"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"00000000\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\", "print \"???\" print \"%s %s : %s %s\" % (rom[i],", "print \"\" if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0):", "platform_instance.scan() if len(platforms) == 0: return dionysus = platforms[platforms.keys()[0]] #print", "rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): sm = som.SOM() sm.initialize_root() root", "last_val == 0x01: print \"Device\" elif last_val == 0x02: print", "+ 2] == rom_out[i + 2] and rom_in[i + 3]", "m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm =", "import sdb_object_model as som from nysa.cbuilder.som_rom_parser import parse_rom_image from nysa.cbuilder.som_rom_generator", "last_val == 0x82: print \"Synthesis\" elif last_val == 0xFF: print", "rom_out[i + 3]) def print_sdb_rom(rom): #rom = sdbc.convert_rom_to_32bit_buffer(rom) rom =", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\", "== 0: return dionysus = platforms[platforms.keys()[0]] #print \"Found Dionysus\" s.set_level(\"fatal\")", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\", "\"00000000\\n\"\\ \"00000502\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\", "= sdbc.create_integration_record(\"Integration Data\", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) 
peripheral.set_child_spacing(0x0100000000)", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F73\\n\"\\ \"6472616D\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "+ 3]) def print_sdb_rom(rom): #rom = sdbc.convert_rom_to_32bit_buffer(rom) rom = rom.splitlines()", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\", "rom[i + 3]) ROM1 = \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "Name\", 123, \"cool tool\", 1.0, \"jeff\") sm.insert_component(root, url) sm.insert_component(root, synthesis)", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\", "\"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\", "peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory,", "SDBWarning from nysa.cbuilder.sdb import SDBError from nysa.common.status import StatusLevel from", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\", "0x800BEAF15DEADC03, device_id = 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") sm.insert_component(root, url) peripheral.set_child_spacing(0x0100000000)", "\"70686572\\n\"\\ 
\"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000340\\n\"\\", "\"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral = sm.insert_bus()", "sdbc.create_repo_url_record(\"http://www.geocities.com\") synthesis = sdbc.create_synthesis_record(\"Synthesis Name\", 123, \"cool tool\", 1.0, \"jeff\")", "+ 2], rom_in[i + 3]) else: print \"%s %s :", "len(rom_in) != len(rom_out): print \"Length of rom is not equal!\"", "SDB\") dionysus.read_sdb() def test_full_bus(self): sm = som.SOM() sm.initialize_root() root =", "0x02: print \"Bridge\" elif last_val == 0x80: print \"Integration\" elif", "\"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\", "\"77625F67\\n\"\\ \"70696F00\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "url = sdbc.create_repo_url_record(\"http://www.geocities.com\") sm.insert_component(root, url) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\", "\"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F73\\n\"\\ \"6472616D\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ 
\"00000200\\n\"\\", "== 0): magic = \"0x%s\" % (rom_in[i].lower()) last_val = int(rom_in[i", "peripheral = sm.insert_bus() peripheral.set_name(\"peripheral\") memory = sm.insert_bus() memory.set_name(\"memory\") d1 =", "\"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\", "\"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "% (rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i +", "= 0x800BEAF15DEADC03, device_id = 0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1)", "= sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out) ''' def test_full_dionysus_read(self): from nysa.host.platform_scanner import", "platform_dict = pscanner.get_platforms() platform_names = platform_dict.keys() if \"dionysus\" not in", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000502\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\", "platforms[platforms.keys()[0]] #print \"Found Dionysus\" s.set_level(\"fatal\") s.Verbose(\"Read SDB\") dionysus.read_sdb() def test_full_bus(self):", "elif last_val == 0x81: print \"URL\" elif last_val == 0x82:", "(magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0): print \"Interconnect\" elif", "pass ''' def test_simple_rom(self): rom_in = ROM1 som = parse_rom_image(rom_in)", "def print_sdb_rom(rom): #rom = sdbc.convert_rom_to_32bit_buffer(rom) rom = rom.splitlines() print \"ROM\"", "from nysa.cbuilder.som_rom_generator import generate_rom_image from nysa.cbuilder.sdb import SDBInfo from nysa.cbuilder.sdb", "\"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ 
\"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "\"memory 2\", size = 0x20000) intr = sdbc.create_integration_record(\"Integration Data\", vendor_id", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F73\\n\"\\", "if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0): print \"Interconnect\"", "elif last_val == 0x02: print \"Bridge\" elif last_val == 0x80:", "\"Empty\" else: print \"???\" print \"%s %s : %s %s\"", "16 == 0): magic = \"0x%s\" % (rom[i].lower()) last_val =", "\"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\", "\"00000000\\n\"\\ \"00000003\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652032\\n\"\\", "\"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "= 0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory,", "rom is not equal!\" return rom_in = rom_in.splitlines() rom_out =", "print \"???\" if rom_in[i] == rom_out[i] and rom_in[i + 1]", "\"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\", "= sm.get_root() peripheral = sm.insert_bus() peripheral.set_name(\"peripheral\") memory = sm.insert_bus() memory.set_name(\"memory\")", "= sdbc.create_repo_url_record(\"http://www.geocities.com\") sm.insert_component(root, url) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, 
intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral,", "\"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "memory.set_name(\"memory\") d1 = sdbc.create_device_record(name = \"device 1\", size = 0x100)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROM2", "last_val = int(rom_in[i + 15], 16) & 0xFF print \"\"", "\"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\", "elif last_val == 0x80: print \"Integration\" elif last_val == 0x81:", "\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\", "== 0): magic = \"0x%s\" % (rom[i].lower()) last_val = int(rom[i", "\"00000000\\n\"\\ \"10000000\\n\"\\ \"00000000\\n\"\\ \"10000008\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F67\\n\"\\", "= PlatformScanner() platform_dict = pscanner.get_platforms() platform_names = platform_dict.keys() if \"dionysus\"", "m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom)", "\"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000003\\n\"\\", "#print \"Found Dionysus\" s.set_level(\"fatal\") s.Verbose(\"Read SDB\") dionysus.read_sdb() def test_full_bus(self): 
sm", "== rom_out[i + 1] and rom_in[i + 2] == rom_out[i", "3] == rom_out[i + 3]: print \"%s %s : %s", "is not equal!\" return rom_in = rom_in.splitlines() rom_out = rom_out.splitlines()", "import PlatformScanner pscanner = PlatformScanner() platform_dict = pscanner.get_platforms() platform_names =", "dionysus.read_sdb() def test_full_bus(self): sm = som.SOM() sm.initialize_root() root = sm.get_root()", "\"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\", "#compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): rom_in = ROMD #print_sdb(rom)", "= rom.splitlines() print \"ROM\" for i in range (0, len(rom),", "0x81: print \"URL\" elif last_val == 0x82: print \"Synthesis\" elif", "\"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "= parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) print_sdb_rom(rom_out) #compare_roms(rom_in,", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"53444200\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000101\\n\"\\ \"00000207\\n\"\\", "platform_names: return s = Status() platform_instance = platform_dict[\"dionysus\"](s) platforms =", "\"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "rom_out[i + 3]: print \"%s %s : %s %s\" %", "magic = \"0x%s\" % (rom[i].lower()) last_val = int(rom[i + 15],", "\"00000001\\n\"\\ 
\"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000502\\n\"\\ \"00000207\\n\"\\", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\", "\"6D656D6F\\n\"\\ \"72792031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"00000000\\n\"\\", "nysa.cbuilder import sdb_component as sdbc from nysa.cbuilder import sdb_object_model as", "import Status class Test (unittest.TestCase): \"\"\"Unit test SDB Tree\"\"\" def", "0x82: print \"Synthesis\" elif last_val == 0xFF: print \"Empty\" else:", "parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROMD = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"53444200\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "\"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ 
\"00000000\\n\"\\", "\"%s %s : %s %s\" % (rom_in[i], rom_in[i + 1],", "\"00000001\\n\"\\ \"00000101\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"10000000\\n\"\\ \"00000000\\n\"\\ \"10000008\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000101\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"10000000\\n\"\\ \"00000000\\n\"\\ \"10000008\\n\"\\", "len(platforms) == 0: return dionysus = platforms[platforms.keys()[0]] #print \"Found Dionysus\"", "= ROM2 #print_sdb_rom(rom_in) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\", "= sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out) def test_full_bus_with_integration(self): sm = som.SOM()", "\"00000001\\n\"\\ \"140F0106\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\", "sdbc from nysa.cbuilder import sdb_object_model as som from nysa.cbuilder.som_rom_parser import", "platform_names = platform_dict.keys() if \"dionysus\" not in platform_names: return s", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00010000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792031\\n\"\\ \"00000000\\n\"\\", "== rom_out[i + 2] and rom_in[i + 3] == rom_out[i", "int(rom_in[i + 15], 16) & 0xFF print \"\" if (magic", "#!/usr/bin/python import unittest import json import sys import os import", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ 
\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\", "m2 = sdbc.create_device_record(name = \"memory 2\", size = 0x20000) peripheral.set_child_spacing(0x0010000000)", "#compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def compare_roms(rom_in, rom_out): if len(rom_in) !=", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROM2 = \"5344422D\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "1] == rom_out[i + 1] and rom_in[i + 2] ==", "3]) else: print \"%s %s : %s %s != %s", "#compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): sm = som.SOM() sm.initialize_root()", "0xFF print \"\" if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val ==", "rom.splitlines() print \"ROM\" for i in range (0, len(rom), 4):", "Tree\"\"\" def setUp(self): pass ''' def test_simple_rom(self): rom_in = ROM1", "\"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "''' def test_full_dionysus_read(self): from nysa.host.platform_scanner import PlatformScanner pscanner = PlatformScanner()", "% (rom[i], rom[i + 1], rom[i + 2], rom[i +", "url) sm.insert_component(root, synthesis) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2)", "nysa.cbuilder.sdb import SDBWarning from nysa.cbuilder.sdb import SDBError from nysa.common.status import", "\"00000000\\n\"\\ 
\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\", "\"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "= sdbc.create_repo_url_record(\"http://www.geocities.com\") synthesis = sdbc.create_synthesis_record(\"Synthesis Name\", 123, \"cool tool\", 1.0,", "\"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F67\\n\"\\ \"70696F00\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\", "elif last_val == 0xFF: print \"Empty\" else: print \"???\" print", "sm.get_root() peripheral = sm.insert_bus() peripheral.set_name(\"peripheral\") memory = sm.insert_bus() memory.set_name(\"memory\") d1", "(rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i + 3],", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F73\\n\"\\ \"6472616D\\n\"\\", "rom_in[i + 1], rom_in[i + 2], rom_in[i + 3]) else:", "if \"dionysus\" not in platform_names: return s = Status() platform_instance", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000340\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"53444200\\n\"\\ \"00000000\\n\"\\", "\"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") synthesis = sdbc.create_synthesis_record(\"Synthesis Name\", 123, \"cool", "rom_out = generate_rom_image(sm) 
rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in,", "sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) print_sdb_rom(rom_out)", "\"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F73\\n\"\\ \"6472616D\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\", "rom_in = rom_in.splitlines() rom_out = rom_out.splitlines() for i in range", "== 0x81: print \"URL\" elif last_val == 0x82: print \"Synthesis\"", "= 0x100) d2 = sdbc.create_device_record(name = \"device 2\", size =", "generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def", "sm.insert_bus() memory.set_name(\"memory\") d1 = sdbc.create_device_record(name = \"device 1\", size =", "\"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "= sm.insert_bus() memory.set_name(\"memory\") d1 = sdbc.create_device_record(name = \"device 1\", size", "+ 3]: print \"%s %s : %s %s\" % (rom_in[i],", "= sdbc.convert_rom_to_32bit_buffer(rom) rom = rom.splitlines() print \"ROM\" for i in", "ROM1 som = parse_rom_image(rom_in) rom_out = generate_rom_image(som) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)", "Dionysus\" s.set_level(\"fatal\") s.Verbose(\"Read SDB\") dionysus.read_sdb() def test_full_bus(self): sm = som.SOM()", "rom_in[i + 1] == rom_out[i + 1] and rom_in[i +", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"03000000\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\", "import sys import os import string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) from", 
"s.set_level(\"fatal\") s.Verbose(\"Read SDB\") dionysus.read_sdb() def test_full_bus(self): sm = som.SOM() sm.initialize_root()", "\"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000100\\n\"\\", "= sdbc.create_device_record(name = \"memory 1\", size = 0x10000) m2 =", "0xFF: print \"Empty\" else: print \"???\" if rom_in[i] == rom_out[i]", "= sdbc.create_device_record(name = \"device 1\", size = 0x100) d2 =", "test_full_bus_with_integration(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral =", "from nysa.cbuilder.sdb import SDBInfo from nysa.cbuilder.sdb import SDBWarning from nysa.cbuilder.sdb", "\"\" if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0): print", "parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out)", "\"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000100\\n\"\\", "#rom_in = ROM2 #print_sdb_rom(rom_in) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm)", "equal!\" return rom_in = rom_in.splitlines() rom_out = rom_out.splitlines() for i", "(rom[i], rom[i + 1], rom[i + 2], rom[i + 3])", "\"72790000\\n\"\\ \"00000000\\n\"\\ 
\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000502\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00800000\\n\"\\", "vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") sm.insert_component(root,", "\"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "\"00000001\\n\"\\ \"140F0106\\n\"\\ \"53444200\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000101\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\", "0x800BEAF15DEADC03, device_id = 0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral,", "from nysa.cbuilder import sdb_object_model as som from nysa.cbuilder.som_rom_parser import parse_rom_image", "\"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "\"???\" print \"%s %s : %s %s\" % (rom[i], rom[i", "\"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"00000000\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"72792032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "= platforms[platforms.keys()[0]] #print \"Found Dionysus\" s.set_level(\"fatal\") s.Verbose(\"Read SDB\") dionysus.read_sdb() def", "last_val == 0x02: print \"Bridge\" elif last_val == 0x80: print", "rom_out[i + 2] and rom_in[i + 3] == rom_out[i +", "1], rom[i + 2], rom[i + 3]) ROM1 = \"5344422D\\n\"\\", "\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ 
\"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\", "= \"device 1\", size = 0x100) d2 = sdbc.create_device_record(name =", "\"53444200\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000101\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"10000000\\n\"\\ \"00000000\\n\"\\", "\"140F0107\\n\"\\ \"77625F73\\n\"\\ \"6472616D\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROM2 = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\", "= generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in) sm", "parse_rom_image(rom_in) rom_out = generate_rom_image(som) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out) '''", "else: print \"%s %s : %s %s != %s %s", "\"00030000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "nysa.cbuilder.sdb import SDBError from nysa.common.status import StatusLevel from nysa.common.status import", "0x20000) intr = sdbc.create_integration_record(\"Integration Data\", vendor_id = 0x800BEAF15DEADC03, device_id =", "% 16 == 0): magic = \"0x%s\" % (rom_in[i].lower()) last_val", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROMD =", "\"00000001\\n\"\\ \"00000000\\n\"\\ \"00000003\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\", "generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out) def test_full_bus_with_integration(self): sm", "sm.insert_bus() 
peripheral.set_name(\"peripheral\") memory = sm.insert_bus() memory.set_name(\"memory\") d1 = sdbc.create_device_record(name =", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "sdbc.create_device_record(name = \"memory 2\", size = 0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000)", "\"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000340\\n\"\\ \"80000000\\n\"\\", "self.assertEqual(rom_in, rom_out) def compare_roms(rom_in, rom_out): if len(rom_in) != len(rom_out): print", "\"00000000\\n\"\\ \"03000000\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\", "+ 3], rom_out[i], rom_out[i + 1], rom_out[i + 2], rom_out[i", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\", "platform_dict.keys() if \"dionysus\" not in platform_names: return s = Status()", "last_val == 0xFF: print \"Empty\" else: print \"???\" print \"%s", "\"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\", "nysa.host.platform_scanner import PlatformScanner pscanner = PlatformScanner() platform_dict = pscanner.get_platforms() platform_names", "rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): rom_in = ROMD #print_sdb(rom) sm", "\"5344422D\\n\"\\ \"00010100\\n\"\\ 
\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "PlatformScanner pscanner = PlatformScanner() platform_dict = pscanner.get_platforms() platform_names = platform_dict.keys()", "1\", size = 0x10000) m2 = sdbc.create_device_record(name = \"memory 2\",", "\"00000040\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "rom_out) def test_full_bus_with_integration(self): sm = som.SOM() sm.initialize_root() root = sm.get_root()", "print \"Integration\" elif last_val == 0x81: print \"URL\" elif last_val", "d2 = sdbc.create_device_record(name = \"device 2\", size = 0x100) m1", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\", "sm.initialize_root() root = sm.get_root() peripheral = sm.insert_bus() peripheral.set_name(\"peripheral\") memory =", "== 0x82: print \"Synthesis\" elif last_val == 0xFF: print \"Empty\"", "3]) def print_sdb_rom(rom): #rom = sdbc.convert_rom_to_32bit_buffer(rom) rom = rom.splitlines() print", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROMD", "os.pardir)) from nysa.cbuilder import sdb_component as sdbc from nysa.cbuilder import", "1] and rom_in[i + 2] == rom_out[i + 2] and", "(i % 16 == 0): magic = \"0x%s\" % (rom[i].lower())", 
"\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000502\\n\"\\", "0x800BEAF15DEADC03, device_id = 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") synthesis = sdbc.create_synthesis_record(\"Synthesis", "range (0, len(rom_in), 4): if (i % 16 == 0):", "\"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "rom_in = ROMD #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm)", "%s %s : %s %s\" % (rom_in[i], rom_in[i + 1],", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000340\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "\"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "last_val == 0xFF: print \"Empty\" else: print \"???\" if rom_in[i]", "% 16 == 0): magic = \"0x%s\" % (rom[i].lower()) last_val", "\"Bridge\" elif last_val == 0x80: print \"Integration\" elif last_val ==", "and rom_in[i + 3] == rom_out[i + 3]: print \"%s", "\"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "= sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): rom_in", "= parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = 
sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in,", "nysa.cbuilder.sdb import SDBInfo from nysa.cbuilder.sdb import SDBWarning from nysa.cbuilder.sdb import", "= generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out)", "print \"Empty\" else: print \"???\" if rom_in[i] == rom_out[i] and", "len(rom), 4): if (i % 16 == 0): magic =", "\"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): sm = som.SOM() sm.initialize_root() root =", "= Status() platform_instance = platform_dict[\"dionysus\"](s) platforms = platform_instance.scan() if len(platforms)", "sm.insert_component(root, url) sm.insert_component(root, synthesis) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral,", "rom_out) ''' def test_full_dionysus_read(self): from nysa.host.platform_scanner import PlatformScanner pscanner =", "0): magic = \"0x%s\" % (rom_in[i].lower()) last_val = int(rom_in[i +", "\"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"03000000\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "d1 = sdbc.create_device_record(name = \"device 1\", size = 0x100) d2", "\"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\", "rom_out) def test_generate_one_sub_bus_with_url(self): sm = som.SOM() sm.initialize_root() root = sm.get_root()", "(rom[i].lower()) last_val = int(rom[i + 15], 16) & 0xFF print", "\"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ 
\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "rom_out[i] and rom_in[i + 1] == rom_out[i + 1] and", "import os import string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) from nysa.cbuilder import", "def test_generate_one_sub_bus_with_url(self): rom_in = ROMD #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out", "== 0x80: print \"Integration\" elif last_val == 0x81: print \"URL\"", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "som from nysa.cbuilder.som_rom_parser import parse_rom_image from nysa.cbuilder.som_rom_generator import generate_rom_image from", "= platform_dict[\"dionysus\"](s) platforms = platform_instance.scan() if len(platforms) == 0: return", "test_simple_rom(self): rom_in = ROM1 som = parse_rom_image(rom_in) rom_out = generate_rom_image(som)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"53444200\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000101\\n\"\\", "print \"%s %s : %s %s\" % (rom_in[i], rom_in[i +", "SDB Tree\"\"\" def setUp(self): pass ''' def test_simple_rom(self): rom_in =", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\", "\"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\", "magic = \"0x%s\" % (rom_in[i].lower()) last_val = int(rom_in[i + 15],", "if len(rom_in) != len(rom_out): print 
\"Length of rom is not", "+ 2], rom[i + 3]) ROM1 = \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000000\\n\"\\", "sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): rom_in =", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"00000101\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"10000000\\n\"\\ \"00000000\\n\"\\ \"10000008\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "self.assertEqual(rom_in, rom_out) def test_full_bus_with_integration(self): sm = som.SOM() sm.initialize_root() root =", "\"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "from nysa.cbuilder import sdb_component as sdbc from nysa.cbuilder import sdb_object_model", "sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) from nysa.cbuilder import sdb_component as sdbc from", "\"Device\" elif last_val == 0x02: print \"Bridge\" elif last_val ==", "ROM2 #print_sdb_rom(rom_in) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out =", "i in range (0, len(rom), 4): if (i % 16", "nysa.common.status import Status class Test (unittest.TestCase): \"\"\"Unit test SDB Tree\"\"\"", "sdb_component as sdbc from nysa.cbuilder import sdb_object_model as som from", "test_generate_one_sub_bus_with_url(self): rom_in = ROMD #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out =", ": %s %s\" % (rom[i], rom[i + 1], rom[i +", "last_val == 0x80: print \"Integration\" elif last_val == 0x81: print", "Data\", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ 
\"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROM2 = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\", "nysa.cbuilder.som_rom_parser import parse_rom_image from nysa.cbuilder.som_rom_generator import generate_rom_image from nysa.cbuilder.sdb import", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\", "from nysa.common.status import Status class Test (unittest.TestCase): \"\"\"Unit test SDB", "Data\", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\")", "\"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\", "+ 1], rom_in[i + 2], rom_in[i + 3], rom_out[i], rom_out[i", "SDBError from nysa.common.status import StatusLevel from nysa.common.status import Status class", "if (i % 16 == 0): magic = \"0x%s\" %", "== 0x01: print \"Device\" elif last_val == 0x02: print \"Bridge\"", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROM2 = \"5344422D\\n\"\\ \"00020100\\n\"\\", "\"00000340\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"53444200\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F73\\n\"\\ \"6472616D\\n\"\\ 
\"00000000\\n\"\\", "sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\", "rom_out) self.assertEqual(rom_in, rom_out) def compare_roms(rom_in, rom_out): if len(rom_in) != len(rom_out):", "\"Length of rom is not equal!\" return rom_in = rom_in.splitlines()", "\"6472616D\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\", "\"00000003\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652032\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROMD = \"5344422D\\n\"\\ \"00020100\\n\"\\", "\"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000502\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "= sdbc.convert_rom_to_32bit_buffer(rom_out) print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def compare_roms(rom_in, rom_out):", "rom_out[i + 2], rom_out[i + 3]) def print_sdb_rom(rom): #rom =", "sdbc.create_integration_record(\"Integration Data\", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral,", "== 0xFF: print \"Empty\" else: print \"???\" if rom_in[i] ==", "!= %s %s : %s %s\" % (rom_in[i], rom_in[i +", "rom_out = 
sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out) ''' def test_full_dionysus_read(self): from nysa.host.platform_scanner", "\"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000340\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"53444200\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "!= len(rom_out): print \"Length of rom is not equal!\" return", "in range (0, len(rom_in), 4): if (i % 16 ==", "range (0, len(rom), 4): if (i % 16 == 0):", "class Test (unittest.TestCase): \"\"\"Unit test SDB Tree\"\"\" def setUp(self): pass", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\", "rom_out) def test_generate_one_sub_bus_with_url(self): rom_in = ROMD #print_sdb(rom) sm = parse_rom_image(rom_in)", "== rom_out[i + 3]: print \"%s %s : %s %s\"", "\"00000000\\n\"\\ \"00000040\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\", "size = 0x20000) intr = sdbc.create_integration_record(\"Integration Data\", vendor_id = 0x800BEAF15DEADC03,", "+ 1], rom[i + 2], rom[i + 3]) ROM1 =", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\", "= sdbc.create_integration_record(\"Integration Data\", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) url", "url) peripheral.set_child_spacing(0x0100000000) 
sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROM2 = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"03000000\\n\"\\", "\"00000000\\n\"\\ \"000000FF\" ROM2 = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"03000000\\n\"\\ \"00000000\\n\"\\", "= 0x800BEAF15DEADC03, device_id = 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") sm.insert_component(root, url)", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"80000000\\n\"\\", "import string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) from nysa.cbuilder import sdb_component as", "3]) ROM1 = \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\", "sdbc.create_integration_record(\"Integration Data\", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) url =", "\"03000000\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "sdbc.create_device_record(name = \"device 1\", size = 0x100) d2 = sdbc.create_device_record(name", "sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out) def test_full_bus_with_integration(self): sm = som.SOM() sm.initialize_root()", "= generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ 
\"00000207\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"00000000\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\", "nysa.cbuilder.som_rom_generator import generate_rom_image from nysa.cbuilder.sdb import SDBInfo from nysa.cbuilder.sdb import", "generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in) sm =", "rom_out[i + 1], rom_out[i + 2], rom_out[i + 3]) def", "return dionysus = platforms[platforms.keys()[0]] #print \"Found Dionysus\" s.set_level(\"fatal\") s.Verbose(\"Read SDB\")", "if len(platforms) == 0: return dionysus = platforms[platforms.keys()[0]] #print \"Found", "= ROMD #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out", "rom_in.splitlines() rom_out = rom_out.splitlines() for i in range (0, len(rom_in),", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROMD = \"5344422D\\n\"\\", "sm.insert_component(root, synthesis) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory,", "= sdbc.create_device_record(name = \"memory 2\", size = 0x20000) intr =", "os.pardir, os.pardir)) from nysa.cbuilder import sdb_component as sdbc from nysa.cbuilder", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROM2 =", "\"00000502\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) 
self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self):", "\"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "def setUp(self): pass ''' def test_simple_rom(self): rom_in = ROM1 som", "Status() platform_instance = platform_dict[\"dionysus\"](s) platforms = platform_instance.scan() if len(platforms) ==", "\"Synthesis\" elif last_val == 0xFF: print \"Empty\" else: print \"???\"", "from nysa.common.status import StatusLevel from nysa.common.status import Status class Test", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\", "\"000000FF\" ROMD = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\", "%s : %s %s\" % (rom_in[i], rom_in[i + 1], rom_in[i", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\", "\"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "def test_simple_rom(self): rom_in = ROM1 som = parse_rom_image(rom_in) rom_out =", "\"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000502\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\", "0xFF: print \"Empty\" else: print \"???\" print \"%s %s :", "= \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ 
\"00000100\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "print \"ROM\" for i in range (0, len(rom), 4): if", "return s = Status() platform_instance = platform_dict[\"dionysus\"](s) platforms = platform_instance.scan()", "\"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom)", "\"device 1\", size = 0x100) d2 = sdbc.create_device_record(name = \"device", "tool\", 1.0, \"jeff\") sm.insert_component(root, url) sm.insert_component(root, synthesis) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr)", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F67\\n\"\\ \"70696F00\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "+ 1] and rom_in[i + 2] == rom_out[i + 2]", "\"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "\"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000340\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "\"00000000\\n\"\\ \"10000008\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F67\\n\"\\ \"70696F00\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"80000000\\n\"\\ \"0000C594\\n\"\\ 
\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def compare_roms(rom_in, rom_out): if len(rom_in)", "\"dionysus\" not in platform_names: return s = Status() platform_instance =", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\", "%s %s != %s %s : %s %s\" % (rom_in[i],", "\"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "not equal!\" return rom_in = rom_in.splitlines() rom_out = rom_out.splitlines() for", "generate_rom_image(som) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out) ''' def test_full_dionysus_read(self): from", "s = Status() platform_instance = platform_dict[\"dionysus\"](s) platforms = platform_instance.scan() if", "\"00000207\\n\"\\ \"00000000\\n\"\\ \"10000000\\n\"\\ \"00000000\\n\"\\ \"10000008\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0107\\n\"\\", "= 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") synthesis = sdbc.create_synthesis_record(\"Synthesis Name\", 123,", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\", "\"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ 
\"00000100\\n\"\\", "size = 0x100) d2 = sdbc.create_device_record(name = \"device 2\", size", "import SDBError from nysa.common.status import StatusLevel from nysa.common.status import Status", "2\", size = 0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000) sm.insert_component(peripheral, d1) sm.insert_component(peripheral,", "m2 = sdbc.create_device_record(name = \"memory 2\", size = 0x20000) intr", "generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out =", "1.0, \"jeff\") sm.insert_component(root, url) sm.insert_component(root, synthesis) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral,", "else: print \"???\" if rom_in[i] == rom_out[i] and rom_in[i +", "\"72792031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"00000000\\n\"\\ \"00030000\\n\"\\", "(rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i + 3])", "2\", size = 0x100) m1 = sdbc.create_device_record(name = \"memory 1\",", "root.set_child_spacing (0x0100000000) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2)", "\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "from nysa.cbuilder.sdb import SDBError from nysa.common.status import StatusLevel from nysa.common.status", "\"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out) def", "== hex(sdbc.SDB_INTERCONNECT_MAGIC) and 
last_val == 0): print \"Interconnect\" elif last_val", "root = sm.get_root() peripheral = sm.insert_bus() peripheral.set_name(\"peripheral\") memory = sm.insert_bus()", "rom_out.splitlines() for i in range (0, len(rom_in), 4): if (i", "\"device 2\", size = 0x100) m1 = sdbc.create_device_record(name = \"memory", "ROMD #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out =", "(unittest.TestCase): \"\"\"Unit test SDB Tree\"\"\" def setUp(self): pass ''' def", "sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out)", "\"00000002\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory,", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\", "last_val == 0): print \"Interconnect\" elif last_val == 0x01: print", "\"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "url = sdbc.create_repo_url_record(\"http://www.geocities.com\") synthesis = sdbc.create_synthesis_record(\"Synthesis Name\", 123, \"cool tool\",", "= ROM1 som = parse_rom_image(rom_in) rom_out = generate_rom_image(som) rom_out =", "synthesis) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1)", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792032\\n\"\\ 
\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in =", "= 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") sm.insert_component(root, url) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr)", "print \"%s %s : %s %s\" % (rom[i], rom[i +", "= 0x20000) intr = sdbc.create_integration_record(\"Integration Data\", vendor_id = 0x800BEAF15DEADC03, device_id", "last_val == 0x81: print \"URL\" elif last_val == 0x82: print", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\", "sdbc.create_device_record(name = \"memory 2\", size = 0x20000) intr = sdbc.create_integration_record(\"Integration", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\", "\"0x%s\" % (rom[i].lower()) last_val = int(rom[i + 15], 16) &", "sdbc.create_device_record(name = \"memory 1\", size = 0x10000) m2 = sdbc.create_device_record(name", "os import string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) from nysa.cbuilder import sdb_component", "rom_in[i + 3] == rom_out[i + 3]: print \"%s %s", "\"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ 
\"00000001\\n\"\\ \"00000000\\n\"\\", "\"000000FF\" ROM2 = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"03000000\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\", "print_sdb_rom(rom): #rom = sdbc.convert_rom_to_32bit_buffer(rom) rom = rom.splitlines() print \"ROM\" for", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\", "print \"URL\" elif last_val == 0x82: print \"Synthesis\" elif last_val", "return rom_in = rom_in.splitlines() rom_out = rom_out.splitlines() for i in", "\"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"\"\"Unit test SDB Tree\"\"\" def setUp(self): pass ''' def test_simple_rom(self):", "\"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F67\\n\"\\ \"70696F00\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\", "\"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"jeff\") sm.insert_component(root, url) sm.insert_component(root, synthesis) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) 
sm.insert_component(peripheral, d1)", "\"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\", "\"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"00000000\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"20000000\\n\"\\ \"80000000\\n\"\\", "rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm)", "rom_in[i + 2], rom_in[i + 3], rom_out[i], rom_out[i + 1],", "print \"Synthesis\" elif last_val == 0xFF: print \"Empty\" else: print", "= int(rom[i + 15], 16) & 0xFF print \"\" if", "\"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000003\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "ROM1 = \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "for i in range (0, len(rom_in), 4): if (i %", "from nysa.cbuilder.sdb import SDBWarning from nysa.cbuilder.sdb import SDBError from nysa.common.status", "= int(rom_in[i + 15], 16) & 0xFF print \"\" if", "0x100) d2 = sdbc.create_device_record(name = \"device 2\", size = 0x100)", "\"memory 1\", size = 0x10000) m2 = sdbc.create_device_record(name = \"memory", "import parse_rom_image from nysa.cbuilder.som_rom_generator import generate_rom_image from nysa.cbuilder.sdb import SDBInfo", "+ 1] == rom_out[i + 1] and rom_in[i + 2]", "123, \"cool tool\", 1.0, \"jeff\") sm.insert_component(root, url) sm.insert_component(root, synthesis) peripheral.set_child_spacing(0x0100000000)", "= sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in) sm = 
parse_rom_image(rom_in) rom_out", "compare_roms(rom_in, rom_out): if len(rom_in) != len(rom_out): print \"Length of rom", "%s %s\" % (rom[i], rom[i + 1], rom[i + 2],", "\"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"URL\" elif last_val == 0x82: print \"Synthesis\" elif last_val ==", "pscanner.get_platforms() platform_names = platform_dict.keys() if \"dionysus\" not in platform_names: return", "\"cool tool\", 1.0, \"jeff\") sm.insert_component(root, url) sm.insert_component(root, synthesis) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral,", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72792032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\", "\"6D656D6F\\n\"\\ \"72792032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "0: return dionysus = platforms[platforms.keys()[0]] #print \"Found Dionysus\" s.set_level(\"fatal\") s.Verbose(\"Read", "hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0): print \"Interconnect\" elif last_val ==", "\"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "16) & 0xFF print \"\" if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and", "rom_out): if len(rom_in) != len(rom_out): print \"Length of rom is", "0x10000) m2 = sdbc.create_device_record(name = \"memory 2\", size = 0x20000)", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ 
\"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00010100\\n\"\\", "import generate_rom_image from nysa.cbuilder.sdb import SDBInfo from nysa.cbuilder.sdb import SDBWarning", "%s : %s %s != %s %s : %s %s\"", "\"20000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\", "0): print \"Interconnect\" elif last_val == 0x01: print \"Device\" elif", "\"63652032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "som.SOM() sm.initialize_root() root = sm.get_root() peripheral = sm.insert_bus() peripheral.set_name(\"peripheral\") memory", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\", "synthesis = sdbc.create_synthesis_record(\"Synthesis Name\", 123, \"cool tool\", 1.0, \"jeff\") sm.insert_component(root,", "\"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\", "import SDBInfo from nysa.cbuilder.sdb import SDBWarning from nysa.cbuilder.sdb import SDBError", "= \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"03000000\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\", "\"10000008\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ 
\"140F0107\\n\"\\ \"77625F67\\n\"\\ \"70696F00\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "rom_in[i + 1], rom_in[i + 2], rom_in[i + 3], rom_out[i],", "\"00000100\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"ROM\" for i in range (0, len(rom), 4): if (i", "size = 0x100) m1 = sdbc.create_device_record(name = \"memory 1\", size", "Test (unittest.TestCase): \"\"\"Unit test SDB Tree\"\"\" def setUp(self): pass '''", "= platform_dict.keys() if \"dionysus\" not in platform_names: return s =", "= generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out)", "\"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\", "%s : %s %s\" % (rom[i], rom[i + 1], rom[i", "and rom_in[i + 1] == rom_out[i + 1] and rom_in[i", "\"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000003\\n\"\\ \"00000100\\n\"\\", "3], rom_out[i], rom_out[i + 1], rom_out[i + 2], rom_out[i +", "= sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out", "nysa.cbuilder import sdb_object_model as som from nysa.cbuilder.som_rom_parser import parse_rom_image from", "% (rom_in[i].lower()) last_val = int(rom_in[i + 15], 16) & 0xFF", "intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom", "\"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000502\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "rom_in[i + 3]) else: print \"%s %s : %s %s", 
"\"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "def compare_roms(rom_in, rom_out): if len(rom_in) != len(rom_out): print \"Length of", "platform_instance = platform_dict[\"dionysus\"](s) platforms = platform_instance.scan() if len(platforms) == 0:", "\"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000040\\n\"\\ \"00000001\\n\"\\", "len(rom_out): print \"Length of rom is not equal!\" return rom_in", "3]: print \"%s %s : %s %s\" % (rom_in[i], rom_in[i", "= sdbc.create_device_record(name = \"device 2\", size = 0x100) m1 =", "rom_out[i], rom_out[i + 1], rom_out[i + 2], rom_out[i + 3])", "\"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "print \"Interconnect\" elif last_val == 0x01: print \"Device\" elif last_val", "\"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "\"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\", "\"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000502\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ 
\"0000C594\\n\"\\", "2], rom[i + 3]) ROM1 = \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "%s\" % (rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\", "+ 3]) ROM1 = \"5344422D\\n\"\\ \"00010100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\", "and last_val == 0): print \"Interconnect\" elif last_val == 0x01:", "in platform_names: return s = Status() platform_instance = platform_dict[\"dionysus\"](s) platforms", "\"Integration\" elif last_val == 0x81: print \"URL\" elif last_val ==", "rom_in[i + 2], rom_in[i + 3]) else: print \"%s %s", "print \"Empty\" else: print \"???\" print \"%s %s : %s", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\", "0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1)", "from nysa.host.platform_scanner import PlatformScanner pscanner = PlatformScanner() platform_dict = pscanner.get_platforms()", "\"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000101\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"10000000\\n\"\\ \"00000000\\n\"\\ \"10000008\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\", "= 0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) 
sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory,", "\"00000207\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000003\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "= 0x10000) m2 = sdbc.create_device_record(name = \"memory 2\", size =", "1], rom_out[i + 2], rom_out[i + 3]) def print_sdb_rom(rom): #rom", "rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in) sm = parse_rom_image(rom_in)", "memory = sm.insert_bus() memory.set_name(\"memory\") d1 = sdbc.create_device_record(name = \"device 1\",", "1], rom_in[i + 2], rom_in[i + 3], rom_out[i], rom_out[i +", "\"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\", "print \"Bridge\" elif last_val == 0x80: print \"Integration\" elif last_val", "\"00020100\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in,", "def test_full_bus(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000003\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\", "0x80: print \"Integration\" elif last_val == 0x81: print \"URL\" elif", "= rom_in.splitlines() rom_out = rom_out.splitlines() for i in range (0,", "elif last_val == 0x82: print \"Synthesis\" elif last_val == 0xFF:", "from nysa.cbuilder.som_rom_parser import parse_rom_image from nysa.cbuilder.som_rom_generator import generate_rom_image from nysa.cbuilder.sdb", "== 0xFF: print \"Empty\" else: 
print \"???\" print \"%s %s", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"746F7000\\n\"\\", "0x01: print \"Device\" elif last_val == 0x02: print \"Bridge\" elif", "\"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"03000000\\n\"\\ \"00000000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "parse_rom_image from nysa.cbuilder.som_rom_generator import generate_rom_image from nysa.cbuilder.sdb import SDBInfo from", "= \"memory 1\", size = 0x10000) m2 = sdbc.create_device_record(name =", "sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out =", "1], rom_in[i + 2], rom_in[i + 3]) else: print \"%s", "sdbc.create_repo_url_record(\"http://www.geocities.com\") sm.insert_component(root, url) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2)", "\"00000000\\n\"\\ \"000000FF\" ROMD = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\", "rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out) def test_full_bus_with_integration(self): sm =", "= sdbc.create_synthesis_record(\"Synthesis Name\", 123, \"cool tool\", 1.0, \"jeff\") sm.insert_component(root, url)", "\"00010100\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\", "\"00000207\\n\"\\ \"00000000\\n\"\\ \"00010000\\n\"\\ \"00000000\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\", "+ 1], rom_out[i + 2], rom_out[i + 3]) def print_sdb_rom(rom):", "\"%s %s : %s %s\" % (rom[i], rom[i + 1],", 
"som = parse_rom_image(rom_in) rom_out = generate_rom_image(som) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in,", "\"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "platform_dict[\"dionysus\"](s) platforms = platform_instance.scan() if len(platforms) == 0: return dionysus", "not in platform_names: return s = Status() platform_instance = platform_dict[\"dionysus\"](s)", "device_id = 0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2)", "\"Interconnect\" elif last_val == 0x01: print \"Device\" elif last_val ==", "device_id = 0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") synthesis = sdbc.create_synthesis_record(\"Synthesis Name\",", "rom_in[i + 2] == rom_out[i + 2] and rom_in[i +", "\"0x%s\" % (rom_in[i].lower()) last_val = int(rom_in[i + 15], 16) &", "%s != %s %s : %s %s\" % (rom_in[i], rom_in[i", "rom[i + 2], rom[i + 3]) ROM1 = \"5344422D\\n\"\\ \"00010100\\n\"\\", "\"00000001\\n\"\\ \"140F0105\\n\"\\ \"64657669\\n\"\\ \"63652031\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000001\\n\"\\", ": %s %s\" % (rom_in[i], rom_in[i + 1], rom_in[i +", "= rom_out.splitlines() for i in range (0, len(rom_in), 4): if", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\" ROMD = \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\", "import json import sys import os import string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir,", "\"00000001\\n\"\\ \"140F0106\\n\"\\ \"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\", "of rom is not equal!\" return rom_in = rom_in.splitlines() 
rom_out", "\"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"80000000\\n\"\\", "+ 2], rom_out[i + 3]) def print_sdb_rom(rom): #rom = sdbc.convert_rom_to_32bit_buffer(rom)", "\"70657269\\n\"\\ \"70686572\\n\"\\ \"616C0000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "def test_full_bus_with_integration(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral", "platforms = platform_instance.scan() if len(platforms) == 0: return dionysus =", "sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm", "m1 = sdbc.create_device_record(name = \"memory 1\", size = 0x10000) m2", "PlatformScanner() platform_dict = pscanner.get_platforms() platform_names = platform_dict.keys() if \"dionysus\" not", "rom_out = generate_rom_image(som) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out) ''' def", "sdbc.convert_rom_to_32bit_buffer(rom_out) print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def compare_roms(rom_in, rom_out): if", "string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) from nysa.cbuilder import sdb_component as sdbc", "= som.SOM() sm.initialize_root() root = sm.get_root() peripheral = sm.insert_bus() peripheral.set_name(\"peripheral\")", "as sdbc from nysa.cbuilder import sdb_object_model as som from nysa.cbuilder.som_rom_parser", "+ 3]) else: print \"%s %s : %s %s !=", "0): magic = \"0x%s\" % (rom[i].lower()) last_val = int(rom[i +", "+ 15], 16) & 0xFF print \"\" if (magic ==", "dionysus = platforms[platforms.keys()[0]] #print \"Found Dionysus\" s.set_level(\"fatal\") s.Verbose(\"Read SDB\") dionysus.read_sdb()", "rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) 
print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def compare_roms(rom_in,", "= \"0x%s\" % (rom[i].lower()) last_val = int(rom[i + 15], 16)", "\"???\" if rom_in[i] == rom_out[i] and rom_in[i + 1] ==", "sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out) ''' def test_full_dionysus_read(self): from nysa.host.platform_scanner import PlatformScanner", "0x00000000) url = sdbc.create_repo_url_record(\"http://www.geocities.com\") sm.insert_component(root, url) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral,", "rom_in = ROM1 som = parse_rom_image(rom_in) rom_out = generate_rom_image(som) rom_out", "json import sys import os import string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))", "m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2", "%s\" % (rom[i], rom[i + 1], rom[i + 2], rom[i", "\"00000040\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\", "\"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0106\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\ \"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\", "sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in) sm = parse_rom_image(rom_in) rom_out =", "+ 2], rom_in[i + 3], rom_out[i], rom_out[i + 1], rom_out[i", "+ 3] == rom_out[i + 3]: print \"%s %s :", "setUp(self): pass ''' def test_simple_rom(self): rom_in = ROM1 som =", "2], rom_out[i + 3]) def print_sdb_rom(rom): #rom = sdbc.convert_rom_to_32bit_buffer(rom) rom", "\"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ 
\"00010100\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00800000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\", "test_full_dionysus_read(self): from nysa.host.platform_scanner import PlatformScanner pscanner = PlatformScanner() platform_dict =", "\"64657669\\n\"\\ \"63652032\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000001\\n\"\\ \"140F0107\\n\"\\ \"77625F73\\n\"\\ \"6472616D\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "+ 1], rom_in[i + 2], rom_in[i + 3]) else: print", "int(rom[i + 15], 16) & 0xFF print \"\" if (magic", "size = 0x10000) m2 = sdbc.create_device_record(name = \"memory 2\", size", "= sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): sm", "if rom_in[i] == rom_out[i] and rom_in[i + 1] == rom_out[i", "last_val = int(rom[i + 15], 16) & 0xFF print \"\"", "\"140F0106\\n\"\\ \"746F7000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000020\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "test SDB Tree\"\"\" def setUp(self): pass ''' def test_simple_rom(self): rom_in", "rom_in[i + 3], rom_out[i], rom_out[i + 1], rom_out[i + 2],", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"000000FF\\n\"\\ \"5344422D\\n\"\\ \"00020100\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000100\\n\"\\ \"00000000\\n\"\\", "== 0x02: print \"Bridge\" elif last_val == 0x80: print \"Integration\"", "and rom_in[i + 2] == rom_out[i + 2] and rom_in[i", "(rom_in[i].lower()) last_val = int(rom_in[i + 15], 16) & 0xFF print", "= \"device 2\", size = 0x100) m1 = sdbc.create_device_record(name =", "(0, len(rom_in), 4): if (i % 16 == 0): magic", "else: print \"???\" print \"%s %s : %s %s\" %", 
"\"00000200\\n\"\\ \"00030000\\n\"\\ \"80000000\\n\"\\ \"0000C594\\n\"\\ \"00000001\\n\"\\ \"00000001\\n\"\\ \"140F0105\\n\"\\ \"6D656D6F\\n\"\\ \"72790000\\n\"\\ \"00000000\\n\"\\", "2] == rom_out[i + 2] and rom_in[i + 3] ==", "\"00000000\\n\"\\ \"00000002\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\ \"00000000\\n\"\\", "\"00000000\\n\"\\ \"00000000\\n\"\\ \"00000001\\n\"\\ \"00000101\\n\"\\ \"00000207\\n\"\\ \"00000000\\n\"\\ \"10000000\\n\"\\ \"00000000\\n\"\\ \"10000008\\n\"\\ \"80000000\\n\"\\" ]
[ "layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout),", "data_in.batch(1024) arch = params_in[\"Architecture\"] dropout = params_in[\"Dropout\"] lr = params_in[\"LearningRate\"]", "activation=\"relu\"), layers.Dropout(dropout), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch ==", "monitor='loss', verbos=0, save_best_only=True, save_freq='epoch') model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2]) df_loss =", "self.epoch_loss[epoch] = logs.get(\"loss\") losses.append(self.epoch_loss[epoch]) if params_in[\"ResumeTraining\"]: model.load_weights(filepath) checkpoint2 = CustomModelCheckPoint()", "layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif", "verbos=0, save_best_only=True, save_freq='epoch') model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2]) df_loss = pd.DataFrame()", "input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1,", "import tensorflow as tf from tensorflow.keras import layers os.environ[\"CUDA_VISIBLE_DEVICES\"] =", "class CustomModelCheckPoint(tf.keras.callbacks.Callback): def __init__(self, **kargs): super(CustomModelCheckPoint, self).__init__(**kargs) self.epoch_loss = {}", "kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5,", "layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ])", "params_in[\"LearningRate\"] attrs = params_in[\"Attrs\"] epochs = 
params_in[\"Epochs\"] if arch ==", "10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else: model =", "done on beginning of epoch. return def on_epoch_end(self, epoch, logs={}):", "checkpoint2]) df_loss = pd.DataFrame() df_loss[\"Epochs\"] = list(range(1, epochs + 1))", "\"relu\"), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\",", "epoch self.epoch_loss[epoch] = logs.get(\"loss\") losses.append(self.epoch_loss[epoch]) if params_in[\"ResumeTraining\"]: model.load_weights(filepath) checkpoint2 =", "]) model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True)) filepath = \"./checkpoints/Model_in-\" + arch +", "beginning of epoch. return def on_epoch_end(self, epoch, logs={}): # things", "data_in = data_in.batch(1024) arch = params_in[\"Architecture\"] dropout = params_in[\"Dropout\"] lr", "layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True),", "logs={}): # things done on end of the epoch self.epoch_loss[epoch]", "= tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbos=0, save_best_only=True, save_freq='epoch') model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2])", "params_in[\"ResumeTraining\"]: model.load_weights(filepath) checkpoint2 = CustomModelCheckPoint() checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbos=0,", "else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\", input_shape=(1, 50,", "layers os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" # gpu_devices = tf.config.experimental.list_physical_devices(\"GPU\") # for", "of epoch. 
return def on_epoch_end(self, epoch, logs={}): # things done", "super(CustomModelCheckPoint, self).__init__(**kargs) self.epoch_loss = {} # accuracy at given epoch", "layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch == \"CNN-2LSTM\": if params_in[\"BatchNorm\"]:", "activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ])", "padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"),", "activation=\"relu\", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout),", "\"CNN-LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\",", "layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr,", "= CustomModelCheckPoint() checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbos=0, save_best_only=True, save_freq='epoch') model.fit(data_in,", "tensorflow.keras import layers os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" # gpu_devices = tf.config.experimental.list_physical_devices(\"GPU\")", "padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Flatten(), layers.Dense(50,", "kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30,", "padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), 
layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)),", "= pd.DataFrame() df_loss[\"Epochs\"] = list(range(1, epochs + 1)) df_loss[\"Loss\"] =", "activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30,", "= params_in[\"Attrs\"] epochs = params_in[\"Epochs\"] if arch == \"BaseCNN\": if", "on_epoch_begin(self, epoch, logs={}): # Things done on beginning of epoch.", "params_in[\"Architecture\"] dropout = params_in[\"Dropout\"] lr = params_in[\"LearningRate\"] attrs = params_in[\"Attrs\"]", "pd import tensorflow as tf from tensorflow.keras import layers os.environ[\"CUDA_VISIBLE_DEVICES\"]", "10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True))", "import os import pandas as pd import tensorflow as tf", "activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1)", "layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch == \"CNN-2LSTM\":", "params_in[\"Dropout\"] lr = params_in[\"LearningRate\"] attrs = params_in[\"Attrs\"] epochs = params_in[\"Epochs\"]", "kernel_size=5, padding=\"same\", activation=\"relu\", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding=\"same\",", "= tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\", input_shape=(1, 50, attrs)), layers.Dropout(dropout),", "padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", 
activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(),", "return_sequences=False), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\",", "save_best_only=True, save_freq='epoch') model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2]) df_loss = pd.DataFrame() df_loss[\"Epochs\"]", "elif arch == \"CNN-2LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10,", "# tf.config.experimental.set_memory_growth(device, True) def trainModel(data_in, params_in): data_in = data_in.take(2048) data_in", "kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Flatten(),", "tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10,", "layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) else:", "layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ])", "def on_epoch_end(self, epoch, logs={}): # things done on end of", "logs.get(\"loss\") losses.append(self.epoch_loss[epoch]) if params_in[\"ResumeTraining\"]: model.load_weights(filepath) checkpoint2 = CustomModelCheckPoint() checkpoint =", "trainModel(data_in, params_in): data_in = data_in.take(2048) data_in = data_in.shuffle(24) data_in =", "if params_in[\"ResumeTraining\"]: model.load_weights(filepath) checkpoint2 = CustomModelCheckPoint() checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss',", "layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\"), 
layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"),", "data_in.shuffle(24) data_in = data_in.batch(1024) arch = params_in[\"Architecture\"] dropout = params_in[\"Dropout\"]", "#!python3 import os import pandas as pd import tensorflow as", "layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True)) filepath", "amsgrad=True)) filepath = \"./checkpoints/Model_in-\" + arch + str(attrs) + \".h5\"", "arch == \"CNN-2LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5,", "done on end of the epoch self.epoch_loss[epoch] = logs.get(\"loss\") losses.append(self.epoch_loss[epoch])", "layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else:", "tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbos=0, save_best_only=True, save_freq='epoch') model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2]) df_loss", "return_sequences=False), layers.Dense(1) ]) model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True)) filepath = \"./checkpoints/Model_in-\" +", "layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10,", "\"./checkpoints/Model_in-\" + arch + str(attrs) + \".h5\" losses = []", "= params_in[\"Architecture\"] dropout = params_in[\"Dropout\"] lr = params_in[\"LearningRate\"] attrs =", "# gpu_devices = tf.config.experimental.list_physical_devices(\"GPU\") # for device in gpu_devices: #", "str(attrs) + \".h5\" losses = [] class CustomModelCheckPoint(tf.keras.callbacks.Callback): def __init__(self,", "data_in = data_in.shuffle(24) data_in = data_in.batch(1024) arch = 
params_in[\"Architecture\"] dropout", "the epoch self.epoch_loss[epoch] = logs.get(\"loss\") losses.append(self.epoch_loss[epoch]) if params_in[\"ResumeTraining\"]: model.load_weights(filepath) checkpoint2", "save_freq='epoch') model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2]) df_loss = pd.DataFrame() df_loss[\"Epochs\"] =", "data_in = data_in.take(2048) data_in = data_in.shuffle(24) data_in = data_in.batch(1024) arch", "epoch, logs={}): # things done on end of the epoch", "gpu_devices: # tf.config.experimental.set_memory_growth(device, True) def trainModel(data_in, params_in): data_in = data_in.take(2048)", "pd.DataFrame() df_loss[\"Epochs\"] = list(range(1, epochs + 1)) df_loss[\"Loss\"] = losses", "os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" # gpu_devices = tf.config.experimental.list_physical_devices(\"GPU\") # for device", "padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1)", "layers.Dense(1) ]) elif arch == \"CNN-LSTM\": if params_in[\"BatchNorm\"]: model =", "return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch == \"CNN-2LSTM\": if", "# things done on end of the epoch self.epoch_loss[epoch] =", "layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) else: model", "50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5,", "return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10,", "tf from tensorflow.keras import layers os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" # gpu_devices", "def __init__(self, **kargs): super(CustomModelCheckPoint, self).__init__(**kargs) self.epoch_loss = {} # accuracy", "= 
params_in[\"Dropout\"] lr = params_in[\"LearningRate\"] attrs = params_in[\"Attrs\"] epochs =", "if arch == \"BaseCNN\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10,", "epochs = params_in[\"Epochs\"] if arch == \"BaseCNN\": if params_in[\"BatchNorm\"]: model", "== \"CNN-2LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\",", "kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ])", "arch == \"BaseCNN\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5,", "layers.Dense(1) ]) elif arch == \"CNN-2LSTM\": if params_in[\"BatchNorm\"]: model =", "= data_in.batch(1024) arch = params_in[\"Architecture\"] dropout = params_in[\"Dropout\"] lr =", "== \"BaseCNN\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\",", "at given epoch def on_epoch_begin(self, epoch, logs={}): # Things done", "= \"0\" # gpu_devices = tf.config.experimental.list_physical_devices(\"GPU\") # for device in", "# Things done on beginning of epoch. 
return def on_epoch_end(self,", "layers.Dense(1) ]) model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True)) filepath = \"./checkpoints/Model_in-\" + arch", "layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True),", "layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30,", "= \"./checkpoints/Model_in-\" + arch + str(attrs) + \".h5\" losses =", "layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else: model", "optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True)) filepath = \"./checkpoints/Model_in-\" + arch + str(attrs) +", "layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50,", "gpu_devices = tf.config.experimental.list_physical_devices(\"GPU\") # for device in gpu_devices: # tf.config.experimental.set_memory_growth(device,", "= [] class CustomModelCheckPoint(tf.keras.callbacks.Callback): def __init__(self, **kargs): super(CustomModelCheckPoint, self).__init__(**kargs) self.epoch_loss", "arch = params_in[\"Architecture\"] dropout = params_in[\"Dropout\"] lr = params_in[\"LearningRate\"] attrs", "__init__(self, **kargs): super(CustomModelCheckPoint, self).__init__(**kargs) self.epoch_loss = {} # accuracy at", "dropout = params_in[\"Dropout\"] lr = params_in[\"LearningRate\"] attrs = params_in[\"Attrs\"] epochs", "\"relu\"), layers.Dense(1) ]) elif arch == \"CNN-2LSTM\": if params_in[\"BatchNorm\"]: model", "= logs.get(\"loss\") losses.append(self.epoch_loss[epoch]) if params_in[\"ResumeTraining\"]: model.load_weights(filepath) 
checkpoint2 = CustomModelCheckPoint() checkpoint", "model.load_weights(filepath) checkpoint2 = CustomModelCheckPoint() checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbos=0, save_best_only=True,", "import pandas as pd import tensorflow as tf from tensorflow.keras", "\"CNN-2LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\",", "callbacks=[checkpoint, checkpoint2]) df_loss = pd.DataFrame() df_loss[\"Epochs\"] = list(range(1, epochs +", "os import pandas as pd import tensorflow as tf from", "= tf.config.experimental.list_physical_devices(\"GPU\") # for device in gpu_devices: # tf.config.experimental.set_memory_growth(device, True)", "{} # accuracy at given epoch def on_epoch_begin(self, epoch, logs={}):", "self).__init__(**kargs) self.epoch_loss = {} # accuracy at given epoch def", "10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) else: model =", "layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True)) filepath = \"./checkpoints/Model_in-\"", "# for device in gpu_devices: # tf.config.experimental.set_memory_growth(device, True) def trainModel(data_in,", "params_in[\"Epochs\"] if arch == \"BaseCNN\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([", "+ \".h5\" losses = [] class CustomModelCheckPoint(tf.keras.callbacks.Callback): def __init__(self, **kargs):", "activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else: model", "10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch ==", "= params_in[\"LearningRate\"] attrs = params_in[\"Attrs\"] epochs = params_in[\"Epochs\"] if arch", "padding=\"same\", activation=\"relu\"), 
layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1)", "Things done on beginning of epoch. return def on_epoch_end(self, epoch,", "losses.append(self.epoch_loss[epoch]) if params_in[\"ResumeTraining\"]: model.load_weights(filepath) checkpoint2 = CustomModelCheckPoint() checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath,", "arch + str(attrs) + \".h5\" losses = [] class CustomModelCheckPoint(tf.keras.callbacks.Callback):", "layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False),", "self.epoch_loss = {} # accuracy at given epoch def on_epoch_begin(self,", "= data_in.take(2048) data_in = data_in.shuffle(24) data_in = data_in.batch(1024) arch =", "layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5,", "elif arch == \"CNN-LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10,", "+ arch + str(attrs) + \".h5\" losses = [] class", "df_loss[\"Epochs\"] = list(range(1, epochs + 1)) df_loss[\"Loss\"] = losses df_loss.to_csv(\"./losses/lossTrend.csv\",", "arch == \"CNN-LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5,", "padding=\"same\", activation=\"relu\", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\"),", "epochs=epochs, callbacks=[checkpoint, checkpoint2]) df_loss = pd.DataFrame() df_loss[\"Epochs\"] = list(range(1, epochs", "pandas as pd import tensorflow as tf from tensorflow.keras import", "params_in[\"Attrs\"] epochs = params_in[\"Epochs\"] if arch == \"BaseCNN\": if params_in[\"BatchNorm\"]:", "+ str(attrs) + \".h5\" losses = [] class 
CustomModelCheckPoint(tf.keras.callbacks.Callback): def", "model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True)) filepath = \"./checkpoints/Model_in-\" + arch + str(attrs)", "layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else: model = tf.keras.Sequential([", "if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\", input_shape=(1,", "True) def trainModel(data_in, params_in): data_in = data_in.take(2048) data_in = data_in.shuffle(24)", "layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch == \"CNN-LSTM\": if", "layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\",", "layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) model.compile(loss=tf.losses.MeanSquaredError(),", "tensorflow as tf from tensorflow.keras import layers os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\"", "on beginning of epoch. return def on_epoch_end(self, epoch, logs={}): #", "return def on_epoch_end(self, epoch, logs={}): # things done on end", "checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbos=0, save_best_only=True, save_freq='epoch') model.fit(data_in, epochs=epochs, callbacks=[checkpoint,", "model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2]) df_loss = pd.DataFrame() df_loss[\"Epochs\"] = list(range(1,", "activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(), layers.Dense(50,", "epoch. 
return def on_epoch_end(self, epoch, logs={}): # things done on", "tf.config.experimental.list_physical_devices(\"GPU\") # for device in gpu_devices: # tf.config.experimental.set_memory_growth(device, True) def", "[] class CustomModelCheckPoint(tf.keras.callbacks.Callback): def __init__(self, **kargs): super(CustomModelCheckPoint, self).__init__(**kargs) self.epoch_loss =", "end of the epoch self.epoch_loss[epoch] = logs.get(\"loss\") losses.append(self.epoch_loss[epoch]) if params_in[\"ResumeTraining\"]:", "def on_epoch_begin(self, epoch, logs={}): # Things done on beginning of", "\"BaseCNN\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\",", "on end of the epoch self.epoch_loss[epoch] = logs.get(\"loss\") losses.append(self.epoch_loss[epoch]) if", "logs={}): # Things done on beginning of epoch. return def", "as tf from tensorflow.keras import layers os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" #", "in gpu_devices: # tf.config.experimental.set_memory_growth(device, True) def trainModel(data_in, params_in): data_in =", "activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)),", "filepath = \"./checkpoints/Model_in-\" + arch + str(attrs) + \".h5\" losses", "data_in.take(2048) data_in = data_in.shuffle(24) data_in = data_in.batch(1024) arch = params_in[\"Architecture\"]", "as pd import tensorflow as tf from tensorflow.keras import layers", "params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\", input_shape=(1, 50,", "layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5,", "== \"CNN-LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([ 
layers.Conv1D(filters=10, kernel_size=5, padding=\"same\",", "\"0\" # gpu_devices = tf.config.experimental.list_physical_devices(\"GPU\") # for device in gpu_devices:", "model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\", input_shape=(1, 50, attrs)),", "]) elif arch == \"CNN-2LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([", "= data_in.shuffle(24) data_in = data_in.batch(1024) arch = params_in[\"Architecture\"] dropout =", "kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(),", "layers.Dropout(dropout), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch == \"CNN-LSTM\":", "activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1)", "padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False),", "padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch", "= {} # accuracy at given epoch def on_epoch_begin(self, epoch,", "given epoch def on_epoch_begin(self, epoch, logs={}): # Things done on", "import layers os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" # gpu_devices = tf.config.experimental.list_physical_devices(\"GPU\") #", "for device in gpu_devices: # tf.config.experimental.set_memory_growth(device, True) def trainModel(data_in, params_in):", "on_epoch_end(self, epoch, logs={}): # things done on end of the", "= list(range(1, epochs + 1)) df_loss[\"Loss\"] = losses df_loss.to_csv(\"./losses/lossTrend.csv\", index=False)", "layers.BatchNormalization(), 
layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else: model = tf.keras.Sequential([", "checkpoint2 = CustomModelCheckPoint() checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbos=0, save_best_only=True, save_freq='epoch')", "layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else: model =", "tf.config.experimental.set_memory_growth(device, True) def trainModel(data_in, params_in): data_in = data_in.take(2048) data_in =", "lr = params_in[\"LearningRate\"] attrs = params_in[\"Attrs\"] epochs = params_in[\"Epochs\"] if", "layers.Dense(50, \"relu\"), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5,", "of the epoch self.epoch_loss[epoch] = logs.get(\"loss\") losses.append(self.epoch_loss[epoch]) if params_in[\"ResumeTraining\"]: model.load_weights(filepath)", "\"relu\"), layers.Dense(1) ]) elif arch == \"CNN-LSTM\": if params_in[\"BatchNorm\"]: model", "layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch", "epoch def on_epoch_begin(self, epoch, logs={}): # Things done on beginning", "return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True)) filepath =", "# accuracy at given epoch def on_epoch_begin(self, epoch, logs={}): #", "def trainModel(data_in, params_in): data_in = data_in.take(2048) data_in = data_in.shuffle(24) data_in", "kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50,", "attrs = params_in[\"Attrs\"] epochs = params_in[\"Epochs\"] if arch == \"BaseCNN\":", "attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", 
activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\",", "kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"),", "]) elif arch == \"CNN-LSTM\": if params_in[\"BatchNorm\"]: model = tf.keras.Sequential([", "df_loss = pd.DataFrame() df_loss[\"Epochs\"] = list(range(1, epochs + 1)) df_loss[\"Loss\"]", "kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False),", "epoch, logs={}): # Things done on beginning of epoch. return", "from tensorflow.keras import layers os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" # gpu_devices =", "padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else:", "layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False),", "layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(), layers.Dense(50, \"relu\"),", "CustomModelCheckPoint(tf.keras.callbacks.Callback): def __init__(self, **kargs): super(CustomModelCheckPoint, self).__init__(**kargs) self.epoch_loss = {} #", "layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) else: model = tf.keras.Sequential([", "layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ])", "layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), 
layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30,", "]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding=\"same\", activation=\"relu\", input_shape=(1,", "params_in): data_in = data_in.take(2048) data_in = data_in.shuffle(24) data_in = data_in.batch(1024)", "device in gpu_devices: # tf.config.experimental.set_memory_growth(device, True) def trainModel(data_in, params_in): data_in", "return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10,", "things done on end of the epoch self.epoch_loss[epoch] = logs.get(\"loss\")", "layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1)", "padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5,", "\".h5\" losses = [] class CustomModelCheckPoint(tf.keras.callbacks.Callback): def __init__(self, **kargs): super(CustomModelCheckPoint,", "accuracy at given epoch def on_epoch_begin(self, epoch, logs={}): # Things", "losses = [] class CustomModelCheckPoint(tf.keras.callbacks.Callback): def __init__(self, **kargs): super(CustomModelCheckPoint, self).__init__(**kargs)", "kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif", "**kargs): super(CustomModelCheckPoint, self).__init__(**kargs) self.epoch_loss = {} # accuracy at given", "layers.Dense(50, \"relu\"), layers.Dense(1) ]) elif arch == \"CNN-LSTM\": if params_in[\"BatchNorm\"]:", "activation=\"relu\"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, \"relu\"), layers.Dense(1) ])", 
"CustomModelCheckPoint() checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbos=0, save_best_only=True, save_freq='epoch') model.fit(data_in, epochs=epochs,", "layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(), layers.Dense(50, \"relu\"), layers.Dense(1)", "= params_in[\"Epochs\"] if arch == \"BaseCNN\": if params_in[\"BatchNorm\"]: model =", "activation=\"relu\"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding=\"same\", activation=\"relu\"), layers.Dropout(dropout), layers.Flatten(), layers.Dense(50, \"relu\")," ]
[ "is not None and auth_password is not None: self.auth =", "\"patch\" url = \"{}{}\" . format (url, name) data =", "= module.params['kong_admin_username'] auth_password = <PASSWORD>.params['<PASSWORD>'] state = module.params['state'] data =", "= self._api_exists(name, api_list) if api_exists: method = \"patch\" url =", "204 if state == \"list\": meta = response.json() has_changed =", "'latest', 'list', 'info'], type='str'), ) return AnsibleModule(argument_spec=args,supports_check_mode=False) def prepare_inputs(self, module):", "helper = ModuleHelper(fields) global module # might not need this", "= response.status_code == 204 if state == \"list\": meta =", "= api.add_or_update(**data) if state == \"absent\": response = api.delete_by_name(data.get(\"name\")) if", "_api_exists(self, name, api_list): for api in api_list: if name ==", "name = dict(required=False, type='str'), upstream_url = dict(required=False, type='str'), request_host =", "type='str'), kong_admin_username = dict(required=False, type='str'), kong_admin_password = dict(required=False, type='str'), name", "= base_url if auth_username is not None and auth_password is", "base_url if auth_username is not None and auth_password is not", "if state == \"present\": meta = response.json() has_changed = response.status_code", "'strip_request_path', 'preserve_host' ] helper = ModuleHelper(fields) global module # might", "response = api.list() if response.status_code == 401: module.fail_json(msg=\"Please specify kong_admin_username", "dict(required=False, default=False, type='bool'), preserve_host = dict(required=False, default=False, type='bool'), state =", "json, requests, os class KongAPI: def __init__(self, base_url, auth_username=None, auth_password=<PASSWORD>):", "None def __url(self, path): return \"{}{}\" . 
format (self.base_url, path)", "module.fail_json(msg=\"Please specify kong_admin_username and kong_admin_password\", meta=response.json()) elif response.status_code == 403:", "(id) url = self.__url(path) return requests.delete(url, auth=self.auth) class ModuleHelper: def", "state: present - name: Delete a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/", "module = helper.get_module() base_url, data, state, auth_user, auth_password = helper.prepare_inputs(module)", "= helper.get_module() base_url, data, state, auth_user, auth_password = helper.prepare_inputs(module) api", "kong_admin_username and kong_admin_password\", meta=response.json()) elif response.status_code == 403: module.fail_json(msg=\"Please check", "a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" state: absent '''", "api.delete_by_name(data.get(\"name\")) if state == \"list\": response = api.list() if response.status_code", "id = info.json().get(\"id\") return self.delete(id) def delete(self, id): path =", "\"absent\": response = api.delete_by_name(data.get(\"name\")) if state == \"list\": response =", "request_host is not None: data['request_host'] = request_host if request_path is", "id): path = \"/apis/{}\" . 
format (id) url = self.__url(path)", "auth=self.auth) def list(self): url = self.__url(\"/apis\") return requests.get(url, auth=self.auth) def", "kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" taget_url: \"http://mockbin.com\" request_host: \"mockbin.com\" state:", "meta = {} has_changed = response.status_code == 204 if state", "data = {} for field in self.fields: value = module.params.get(field,", "= info.json().get(\"id\") return self.delete(id) def delete(self, id): path = \"/apis/{}\"", "kong_admin_password = dict(required=False, type='str'), name = dict(required=False, type='str'), upstream_url =", "def main(): fields = [ 'name', 'upstream_url', 'request_host', 'request_path', 'strip_request_path',", "kong_admin_password\", meta=response.json()) else: has_changed, meta = helper.get_response(response, state) module.exit_json(changed=has_changed, meta=meta)", "False return (has_changed, meta) def main(): fields = [ 'name',", "] helper = ModuleHelper(fields) global module # might not need", "'list', 'info'], type='str'), ) return AnsibleModule(argument_spec=args,supports_check_mode=False) def prepare_inputs(self, module): url", "request_path return getattr(requests, method)(url, data, auth=self.auth) def list(self): url =", "= response.json() has_changed = response.status_code in [201, 200] if state", "= dict(required=False, default=\"present\", choices=['present', 'absent', 'latest', 'list', 'info'], type='str'), )", "in [201, 200] if state == \"absent\": meta = {}", "has_changed = response.status_code in [201, 200] if state == \"absent\":", "elif response.status_code == 403: module.fail_json(msg=\"Please check kong_admin_username and kong_admin_password\", meta=response.json())", "strip_request_path, \"preserve_host\": preserve_host } if request_host is not None: data['request_host']", "helper.get_module() base_url, data, state, auth_user, auth_password = helper.prepare_inputs(module) api =", ". 
format (id)) return requests.get(url, auth=self.auth) def delete_by_name(self, name): info", "= KongAPI(base_url, auth_user, auth_password) if state == \"present\": response =", "EXAMPLES = ''' - name: Register a site kong: kong_admin_uri:", "def add_or_update(self, name, upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False): method =", "if auth_username is not None and auth_password is not None:", "meta = response.json() has_changed = False return (has_changed, meta) def", "requests.get(url, auth=self.auth) def delete_by_name(self, name): info = self.info(name) id =", "\"present\": response = api.add_or_update(**data) if state == \"absent\": response =", "'info'], type='str'), ) return AnsibleModule(argument_spec=args,supports_check_mode=False) def prepare_inputs(self, module): url =", "= [ 'name', 'upstream_url', 'request_host', 'request_path', 'strip_request_path', 'preserve_host' ] helper", "short_description: Configure a Kong API Gateway ''' EXAMPLES = '''", "= api.list() if response.status_code == 401: module.fail_json(msg=\"Please specify kong_admin_username and", "[]) api_exists = self._api_exists(name, api_list) if api_exists: method = \"patch\"", "module: kong short_description: Configure a Kong API Gateway ''' EXAMPLES", "dict(required=False, type='str'), kong_admin_username = dict(required=False, type='str'), kong_admin_password = dict(required=False, type='str'),", "has_changed, meta = helper.get_response(response, state) module.exit_json(changed=has_changed, meta=meta) from ansible.module_utils.basic import", "has_changed = False return (has_changed, meta) def main(): fields =", "is not None: data[field] = value return (url, data, state,", "data, state, auth_user, auth_password = helper.prepare_inputs(module) api = KongAPI(base_url, auth_user,", "is not None: data['request_path'] = request_path return getattr(requests, method)(url, data,", "None and auth_password is not None: self.auth = (auth_username, 
auth_password)", "state, auth_user, auth_password = helper.prepare_inputs(module) api = KongAPI(base_url, auth_user, auth_password)", "a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" taget_url: \"http://mockbin.com\" request_host:", "state, auth_user, auth_password) def get_response(self, response, state): if state ==", "preserve_host=False): method = \"post\" url = self.__url(\"/apis/\") api_list = self.list().json().get(\"data\",", "format (self.base_url, path) def _api_exists(self, name, api_list): for api in", "if response.status_code == 401: module.fail_json(msg=\"Please specify kong_admin_username and kong_admin_password\", meta=response.json())", "Kong API Gateway ''' EXAMPLES = ''' - name: Register", "= request_path return getattr(requests, method)(url, data, auth=self.auth) def list(self): url", "== 403: module.fail_json(msg=\"Please check kong_admin_username and kong_admin_password\", meta=response.json()) else: has_changed,", "name: \"Mockbin\" taget_url: \"http://mockbin.com\" request_host: \"mockbin.com\" state: present - name:", "<PASSWORD>.params['<PASSWORD>'] state = module.params['state'] data = {} for field in", "Gateway ''' EXAMPLES = ''' - name: Register a site", "auth_username is not None and auth_password is not None: self.auth", "= dict(required=False, type='str'), kong_admin_username = dict(required=False, type='str'), kong_admin_password = dict(required=False,", "#!/usr/bin/python DOCUMENTATION = ''' --- module: kong short_description: Configure a", "kong_admin_uri = dict(required=False, type='str'), kong_admin_username = dict(required=False, type='str'), kong_admin_password =", "response, state): if state == \"present\": meta = response.json() has_changed", "= self.__url(\"/apis/\") api_list = self.list().json().get(\"data\", []) api_exists = self._api_exists(name, api_list)", "getattr(requests, method)(url, data, auth=self.auth) def list(self): url = self.__url(\"/apis\") return", "default=False, type='bool'), 
state = dict(required=False, default=\"present\", choices=['present', 'absent', 'latest', 'list',", "None: data[field] = value return (url, data, state, auth_user, auth_password)", "def __url(self, path): return \"{}{}\" . format (self.base_url, path) def", "= { \"name\": name, \"upstream_url\": upstream_url, \"strip_request_path\": strip_request_path, \"preserve_host\": preserve_host", "dict( kong_admin_uri = dict(required=False, type='str'), kong_admin_username = dict(required=False, type='str'), kong_admin_password", "[201, 200] if state == \"absent\": meta = {} has_changed", "module.params['kong_admin_uri'] auth_user = module.params['kong_admin_username'] auth_password = <PASSWORD>.params['<PASSWORD>'] state = module.params['state']", "url = self.__url(\"/apis/{}\" . format (id)) return requests.get(url, auth=self.auth) def", "dict(required=False, type='str'), kong_admin_password = dict(required=False, type='str'), name = dict(required=False, type='str'),", "DOCUMENTATION = ''' --- module: kong short_description: Configure a Kong", "dict(required=False, default=False, type='bool'), state = dict(required=False, default=\"present\", choices=['present', 'absent', 'latest',", "in self.fields: value = module.params.get(field, None) if value is not", "data = { \"name\": name, \"upstream_url\": upstream_url, \"strip_request_path\": strip_request_path, \"preserve_host\":", "'absent', 'latest', 'list', 'info'], type='str'), ) return AnsibleModule(argument_spec=args,supports_check_mode=False) def prepare_inputs(self,", "api_list) if api_exists: method = \"patch\" url = \"{}{}\" .", "def _api_exists(self, name, api_list): for api in api_list: if name", "base_url, auth_username=None, auth_password=<PASSWORD>): self.base_url = base_url if auth_username is not", "module.params['state'] data = {} for field in self.fields: value =", "\"present\": meta = response.json() has_changed = response.status_code in [201, 200]", "self.fields: value = module.params.get(field, None) if 
value is not None:", ". format (id) url = self.__url(path) return requests.delete(url, auth=self.auth) class", "response.status_code in [201, 200] if state == \"absent\": meta =", "for api in api_list: if name == api.get(\"name\", None): return", "''' - name: Register a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name:", "def delete(self, id): path = \"/apis/{}\" . format (id) url", "= value return (url, data, state, auth_user, auth_password) def get_response(self,", "= {} has_changed = response.status_code == 204 if state ==", "Configure a Kong API Gateway ''' EXAMPLES = ''' -", "request_path=None, strip_request_path=False, preserve_host=False): method = \"post\" url = self.__url(\"/apis/\") api_list", "module.fail_json(msg=\"Please check kong_admin_username and kong_admin_password\", meta=response.json()) else: has_changed, meta =", "import * from ansible.module_utils.urls import * if __name__ == '__main__':", "= \"post\" url = self.__url(\"/apis/\") api_list = self.list().json().get(\"data\", []) api_exists", "response.status_code == 401: module.fail_json(msg=\"Please specify kong_admin_username and kong_admin_password\", meta=response.json()) elif", "self.info(name) id = info.json().get(\"id\") return self.delete(id) def delete(self, id): path", "global module # might not need this module = helper.get_module()", "format (id)) return requests.get(url, auth=self.auth) def delete_by_name(self, name): info =", "list(self): url = self.__url(\"/apis\") return requests.get(url, auth=self.auth) def info(self, id):", "''' --- module: kong short_description: Configure a Kong API Gateway", "name): info = self.info(name) id = info.json().get(\"id\") return self.delete(id) def", "API Gateway ''' EXAMPLES = ''' - name: Register a", "name) data = { \"name\": name, \"upstream_url\": upstream_url, \"strip_request_path\": strip_request_path,", "info(self, id): url = self.__url(\"/apis/{}\" . 
format (id)) return requests.get(url,", "= dict(required=False, type='str'), name = dict(required=False, type='str'), upstream_url = dict(required=False,", "method = \"post\" url = self.__url(\"/apis/\") api_list = self.list().json().get(\"data\", [])", "self.auth = None def __url(self, path): return \"{}{}\" . format", "data, state, auth_user, auth_password) def get_response(self, response, state): if state", "[ 'name', 'upstream_url', 'request_host', 'request_path', 'strip_request_path', 'preserve_host' ] helper =", "dict(required=False, type='str'), strip_request_path = dict(required=False, default=False, type='bool'), preserve_host = dict(required=False,", "return (has_changed, meta) def main(): fields = [ 'name', 'upstream_url',", "base_url, data, state, auth_user, auth_password = helper.prepare_inputs(module) api = KongAPI(base_url,", "= \"{}{}\" . format (url, name) data = { \"name\":", "= dict(required=False, type='str'), request_host = dict(required=False, type='str'), request_path = dict(required=False,", "info.json().get(\"id\") return self.delete(id) def delete(self, id): path = \"/apis/{}\" .", "type='str'), request_path = dict(required=False, type='str'), strip_request_path = dict(required=False, default=False, type='bool'),", "None: data['request_path'] = request_path return getattr(requests, method)(url, data, auth=self.auth) def", "* from ansible.module_utils.urls import * if __name__ == '__main__': main()", "= {} for field in self.fields: value = module.params.get(field, None)", "auth_password is not None: self.auth = (auth_username, auth_password) else: self.auth", "} if request_host is not None: data['request_host'] = request_host if", "self.delete(id) def delete(self, id): path = \"/apis/{}\" . 
format (id)", "not None: data['request_host'] = request_host if request_path is not None:", "state == \"list\": response = api.list() if response.status_code == 401:", "ModuleHelper: def __init__(self, fields): self.fields = fields def get_module(self): args", "http://127.0.0.1:8001/apis/ name: \"Mockbin\" state: absent ''' import json, requests, os", "data['request_host'] = request_host if request_path is not None: data['request_path'] =", "auth_user = module.params['kong_admin_username'] auth_password = <PASSWORD>.params['<PASSWORD>'] state = module.params['state'] data", "\"post\" url = self.__url(\"/apis/\") api_list = self.list().json().get(\"data\", []) api_exists =", "\"Mockbin\" taget_url: \"http://mockbin.com\" request_host: \"mockbin.com\" state: present - name: Delete", "'upstream_url', 'request_host', 'request_path', 'strip_request_path', 'preserve_host' ] helper = ModuleHelper(fields) global", "type='str'), strip_request_path = dict(required=False, default=False, type='bool'), preserve_host = dict(required=False, default=False,", "if state == \"absent\": meta = {} has_changed = response.status_code", "__init__(self, base_url, auth_username=None, auth_password=<PASSWORD>): self.base_url = base_url if auth_username is", "info = self.info(name) id = info.json().get(\"id\") return self.delete(id) def delete(self,", "def delete_by_name(self, name): info = self.info(name) id = info.json().get(\"id\") return", "\"/apis/{}\" . 
format (id) url = self.__url(path) return requests.delete(url, auth=self.auth)", "else: self.auth = None def __url(self, path): return \"{}{}\" .", "\"preserve_host\": preserve_host } if request_host is not None: data['request_host'] =", "add_or_update(self, name, upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False): method = \"post\"", "name: Delete a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" state:", "upstream_url = dict(required=False, type='str'), request_host = dict(required=False, type='str'), request_path =", "self.auth = (auth_username, auth_password) else: self.auth = None def __url(self,", "self.list().json().get(\"data\", []) api_exists = self._api_exists(name, api_list) if api_exists: method =", "absent ''' import json, requests, os class KongAPI: def __init__(self,", "api.get(\"name\", None): return True return False def add_or_update(self, name, upstream_url,", "== 401: module.fail_json(msg=\"Please specify kong_admin_username and kong_admin_password\", meta=response.json()) elif response.status_code", "return requests.delete(url, auth=self.auth) class ModuleHelper: def __init__(self, fields): self.fields =", "= module.params['kong_admin_uri'] auth_user = module.params['kong_admin_username'] auth_password = <PASSWORD>.params['<PASSWORD>'] state =", "= helper.get_response(response, state) module.exit_json(changed=has_changed, meta=meta) from ansible.module_utils.basic import * from", "url = self.__url(\"/apis\") return requests.get(url, auth=self.auth) def info(self, id): url", "response = api.add_or_update(**data) if state == \"absent\": response = api.delete_by_name(data.get(\"name\"))", "delete_by_name(self, name): info = self.info(name) id = info.json().get(\"id\") return self.delete(id)", "response.json() has_changed = response.status_code in [201, 200] if state ==", "module): url = module.params['kong_admin_uri'] auth_user = module.params['kong_admin_username'] 
auth_password = <PASSWORD>.params['<PASSWORD>']", "fields): self.fields = fields def get_module(self): args = dict( kong_admin_uri", "requests, os class KongAPI: def __init__(self, base_url, auth_username=None, auth_password=<PASSWORD>): self.base_url", "state = module.params['state'] data = {} for field in self.fields:", "method)(url, data, auth=self.auth) def list(self): url = self.__url(\"/apis\") return requests.get(url,", "upstream_url, \"strip_request_path\": strip_request_path, \"preserve_host\": preserve_host } if request_host is not", "__url(self, path): return \"{}{}\" . format (self.base_url, path) def _api_exists(self,", "state == \"present\": meta = response.json() has_changed = response.status_code in", "'request_host', 'request_path', 'strip_request_path', 'preserve_host' ] helper = ModuleHelper(fields) global module", "True return False def add_or_update(self, name, upstream_url, request_host=None, request_path=None, strip_request_path=False,", "auth_user, auth_password) if state == \"present\": response = api.add_or_update(**data) if", "might not need this module = helper.get_module() base_url, data, state,", "- name: Register a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\"", "name, api_list): for api in api_list: if name == api.get(\"name\",", "dict(required=False, type='str'), upstream_url = dict(required=False, type='str'), request_host = dict(required=False, type='str'),", "class ModuleHelper: def __init__(self, fields): self.fields = fields def get_module(self):", "{ \"name\": name, \"upstream_url\": upstream_url, \"strip_request_path\": strip_request_path, \"preserve_host\": preserve_host }", "\"name\": name, \"upstream_url\": upstream_url, \"strip_request_path\": strip_request_path, \"preserve_host\": preserve_host } if", "name: Register a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" taget_url:", "def get_response(self, response, state): if state == \"present\": meta =", "--- module: 
kong short_description: Configure a Kong API Gateway '''", "\"list\": meta = response.json() has_changed = False return (has_changed, meta)", "= self.__url(\"/apis/{}\" . format (id)) return requests.get(url, auth=self.auth) def delete_by_name(self,", "default=\"present\", choices=['present', 'absent', 'latest', 'list', 'info'], type='str'), ) return AnsibleModule(argument_spec=args,supports_check_mode=False)", "self.fields = fields def get_module(self): args = dict( kong_admin_uri =", "\"list\": response = api.list() if response.status_code == 401: module.fail_json(msg=\"Please specify", "meta=response.json()) elif response.status_code == 403: module.fail_json(msg=\"Please check kong_admin_username and kong_admin_password\",", "\"strip_request_path\": strip_request_path, \"preserve_host\": preserve_host } if request_host is not None:", "(id)) return requests.get(url, auth=self.auth) def delete_by_name(self, name): info = self.info(name)", "meta = helper.get_response(response, state) module.exit_json(changed=has_changed, meta=meta) from ansible.module_utils.basic import *", "return (url, data, state, auth_user, auth_password) def get_response(self, response, state):", "if state == \"list\": meta = response.json() has_changed = False", "request_host: \"mockbin.com\" state: present - name: Delete a site kong:", "request_path = dict(required=False, type='str'), strip_request_path = dict(required=False, default=False, type='bool'), preserve_host", "auth_password=<PASSWORD>): self.base_url = base_url if auth_username is not None and", "auth_user, auth_password = helper.prepare_inputs(module) api = KongAPI(base_url, auth_user, auth_password) if", "name == api.get(\"name\", None): return True return False def add_or_update(self,", "Delete a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" state: absent", "\"upstream_url\": upstream_url, \"strip_request_path\": strip_request_path, \"preserve_host\": preserve_host } if request_host is", 
"self.__url(path) return requests.delete(url, auth=self.auth) class ModuleHelper: def __init__(self, fields): self.fields", "check kong_admin_username and kong_admin_password\", meta=response.json()) else: has_changed, meta = helper.get_response(response,", "= <PASSWORD>.params['<PASSWORD>'] state = module.params['state'] data = {} for field", "= self.list().json().get(\"data\", []) api_exists = self._api_exists(name, api_list) if api_exists: method", "(url, data, state, auth_user, auth_password) def get_response(self, response, state): if", "taget_url: \"http://mockbin.com\" request_host: \"mockbin.com\" state: present - name: Delete a", "site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" state: absent ''' import", "auth_user, auth_password) def get_response(self, response, state): if state == \"present\":", "in api_list: if name == api.get(\"name\", None): return True return", "else: has_changed, meta = helper.get_response(response, state) module.exit_json(changed=has_changed, meta=meta) from ansible.module_utils.basic", "auth=self.auth) def delete_by_name(self, name): info = self.info(name) id = info.json().get(\"id\")", "def get_module(self): args = dict( kong_admin_uri = dict(required=False, type='str'), kong_admin_username", "state == \"present\": response = api.add_or_update(**data) if state == \"absent\":", "return True return False def add_or_update(self, name, upstream_url, request_host=None, request_path=None,", "name, upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False): method = \"post\" url", "meta = response.json() has_changed = response.status_code in [201, 200] if", "requests.delete(url, auth=self.auth) class ModuleHelper: def __init__(self, fields): self.fields = fields", "request_path is not None: data['request_path'] = request_path return getattr(requests, method)(url,", "choices=['present', 'absent', 'latest', 'list', 'info'], type='str'), ) return 
AnsibleModule(argument_spec=args,supports_check_mode=False) def", "def info(self, id): url = self.__url(\"/apis/{}\" . format (id)) return", "state): if state == \"present\": meta = response.json() has_changed =", "= self.__url(path) return requests.delete(url, auth=self.auth) class ModuleHelper: def __init__(self, fields):", "api_list: if name == api.get(\"name\", None): return True return False", "data, auth=self.auth) def list(self): url = self.__url(\"/apis\") return requests.get(url, auth=self.auth)", "value return (url, data, state, auth_user, auth_password) def get_response(self, response,", "helper.prepare_inputs(module) api = KongAPI(base_url, auth_user, auth_password) if state == \"present\":", "fields def get_module(self): args = dict( kong_admin_uri = dict(required=False, type='str'),", "= request_host if request_path is not None: data['request_path'] = request_path", "auth=self.auth) def info(self, id): url = self.__url(\"/apis/{}\" . format (id))", "= module.params['state'] data = {} for field in self.fields: value", "= response.status_code in [201, 200] if state == \"absent\": meta", "None: self.auth = (auth_username, auth_password) else: self.auth = None def", "return AnsibleModule(argument_spec=args,supports_check_mode=False) def prepare_inputs(self, module): url = module.params['kong_admin_uri'] auth_user =", "data[field] = value return (url, data, state, auth_user, auth_password) def", "None) if value is not None: data[field] = value return", "= dict(required=False, type='str'), upstream_url = dict(required=False, type='str'), request_host = dict(required=False,", "has_changed = response.status_code == 204 if state == \"list\": meta", "if api_exists: method = \"patch\" url = \"{}{}\" . format", "if value is not None: data[field] = value return (url,", "= self.__url(\"/apis\") return requests.get(url, auth=self.auth) def info(self, id): url =", "self.__url(\"/apis/{}\" . 
format (id)) return requests.get(url, auth=self.auth) def delete_by_name(self, name):", "auth_password) else: self.auth = None def __url(self, path): return \"{}{}\"", "a Kong API Gateway ''' EXAMPLES = ''' - name:", "kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" taget_url: \"http://mockbin.com\" request_host: \"mockbin.com\" state: present", "delete(self, id): path = \"/apis/{}\" . format (id) url =", "== api.get(\"name\", None): return True return False def add_or_update(self, name,", "(url, name) data = { \"name\": name, \"upstream_url\": upstream_url, \"strip_request_path\":", "requests.get(url, auth=self.auth) def info(self, id): url = self.__url(\"/apis/{}\" . format", "= api.delete_by_name(data.get(\"name\")) if state == \"list\": response = api.list() if", "not None: data[field] = value return (url, data, state, auth_user,", "= fields def get_module(self): args = dict( kong_admin_uri = dict(required=False,", "= dict(required=False, type='str'), strip_request_path = dict(required=False, default=False, type='bool'), preserve_host =", "dict(required=False, type='str'), request_host = dict(required=False, type='str'), request_path = dict(required=False, type='str'),", "api_exists: method = \"patch\" url = \"{}{}\" . 
format (url,", "response = api.delete_by_name(data.get(\"name\")) if state == \"list\": response = api.list()", "api_list = self.list().json().get(\"data\", []) api_exists = self._api_exists(name, api_list) if api_exists:", "return False def add_or_update(self, name, upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False):", "module.params.get(field, None) if value is not None: data[field] = value", "== \"absent\": response = api.delete_by_name(data.get(\"name\")) if state == \"list\": response", "type='str'), ) return AnsibleModule(argument_spec=args,supports_check_mode=False) def prepare_inputs(self, module): url = module.params['kong_admin_uri']", "= False return (has_changed, meta) def main(): fields = [", "ansible.module_utils.basic import * from ansible.module_utils.urls import * if __name__ ==", "kong_admin_username = dict(required=False, type='str'), kong_admin_password = dict(required=False, type='str'), name =", "return self.delete(id) def delete(self, id): path = \"/apis/{}\" . format", "''' import json, requests, os class KongAPI: def __init__(self, base_url,", "= (auth_username, auth_password) else: self.auth = None def __url(self, path):", "and kong_admin_password\", meta=response.json()) else: has_changed, meta = helper.get_response(response, state) module.exit_json(changed=has_changed,", "args = dict( kong_admin_uri = dict(required=False, type='str'), kong_admin_username = dict(required=False,", "type='str'), upstream_url = dict(required=False, type='str'), request_host = dict(required=False, type='str'), request_path", "get_module(self): args = dict( kong_admin_uri = dict(required=False, type='str'), kong_admin_username =", "auth=self.auth) class ModuleHelper: def __init__(self, fields): self.fields = fields def", "preserve_host } if request_host is not None: data['request_host'] = request_host", "= None def __url(self, path): return \"{}{}\" . 
format (self.base_url,", "get_response(self, response, state): if state == \"present\": meta = response.json()", "not None: self.auth = (auth_username, auth_password) else: self.auth = None", "present - name: Delete a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name:", "strip_request_path = dict(required=False, default=False, type='bool'), preserve_host = dict(required=False, default=False, type='bool'),", "from ansible.module_utils.basic import * from ansible.module_utils.urls import * if __name__", "state) module.exit_json(changed=has_changed, meta=meta) from ansible.module_utils.basic import * from ansible.module_utils.urls import", "prepare_inputs(self, module): url = module.params['kong_admin_uri'] auth_user = module.params['kong_admin_username'] auth_password =", "== 204 if state == \"list\": meta = response.json() has_changed", "== \"absent\": meta = {} has_changed = response.status_code == 204", "api_list): for api in api_list: if name == api.get(\"name\", None):", "(auth_username, auth_password) else: self.auth = None def __url(self, path): return", "path): return \"{}{}\" . 
format (self.base_url, path) def _api_exists(self, name,", "== \"list\": meta = response.json() has_changed = False return (has_changed,", "request_host if request_path is not None: data['request_path'] = request_path return", "= self.info(name) id = info.json().get(\"id\") return self.delete(id) def delete(self, id):", "auth_password = helper.prepare_inputs(module) api = KongAPI(base_url, auth_user, auth_password) if state", "kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" state: absent ''' import json, requests,", "preserve_host = dict(required=False, default=False, type='bool'), state = dict(required=False, default=\"present\", choices=['present',", "state == \"absent\": response = api.delete_by_name(data.get(\"name\")) if state == \"list\":", "type='str'), kong_admin_password = dict(required=False, type='str'), name = dict(required=False, type='str'), upstream_url", "return \"{}{}\" . format (self.base_url, path) def _api_exists(self, name, api_list):", "type='str'), request_host = dict(required=False, type='str'), request_path = dict(required=False, type='str'), strip_request_path", "= ModuleHelper(fields) global module # might not need this module", "path) def _api_exists(self, name, api_list): for api in api_list: if", "data['request_path'] = request_path return getattr(requests, method)(url, data, auth=self.auth) def list(self):", "api_exists = self._api_exists(name, api_list) if api_exists: method = \"patch\" url", "value is not None: data[field] = value return (url, data,", "os class KongAPI: def __init__(self, base_url, auth_username=None, auth_password=<PASSWORD>): self.base_url =", "state == \"absent\": meta = {} has_changed = response.status_code ==", "self.base_url = base_url if auth_username is not None and auth_password", "module.params['kong_admin_username'] auth_password = <PASSWORD>.params['<PASSWORD>'] state = module.params['state'] data = {}", "self.__url(\"/apis\") return requests.get(url, auth=self.auth) def info(self, id): url 
= self.__url(\"/apis/{}\"", "\"http://mockbin.com\" request_host: \"mockbin.com\" state: present - name: Delete a site", "KongAPI: def __init__(self, base_url, auth_username=None, auth_password=<PASSWORD>): self.base_url = base_url if", "= dict(required=False, default=False, type='bool'), state = dict(required=False, default=\"present\", choices=['present', 'absent',", "if name == api.get(\"name\", None): return True return False def", ". format (url, name) data = { \"name\": name, \"upstream_url\":", "is not None: self.auth = (auth_username, auth_password) else: self.auth =", "\"{}{}\" . format (url, name) data = { \"name\": name,", "url = self.__url(\"/apis/\") api_list = self.list().json().get(\"data\", []) api_exists = self._api_exists(name,", "return getattr(requests, method)(url, data, auth=self.auth) def list(self): url = self.__url(\"/apis\")", "= dict(required=False, type='str'), kong_admin_password = dict(required=False, type='str'), name = dict(required=False,", "state == \"list\": meta = response.json() has_changed = False return", "this module = helper.get_module() base_url, data, state, auth_user, auth_password =", "format (id) url = self.__url(path) return requests.delete(url, auth=self.auth) class ModuleHelper:", "site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" taget_url: \"http://mockbin.com\" request_host: \"mockbin.com\"", "return requests.get(url, auth=self.auth) def info(self, id): url = self.__url(\"/apis/{}\" .", "def __init__(self, fields): self.fields = fields def get_module(self): args =", "= \"/apis/{}\" . 
format (id) url = self.__url(path) return requests.delete(url,", "format (url, name) data = { \"name\": name, \"upstream_url\": upstream_url,", "response.status_code == 204 if state == \"list\": meta = response.json()", "type='bool'), preserve_host = dict(required=False, default=False, type='bool'), state = dict(required=False, default=\"present\",", "None): return True return False def add_or_update(self, name, upstream_url, request_host=None,", "if state == \"absent\": response = api.delete_by_name(data.get(\"name\")) if state ==", "{} has_changed = response.status_code == 204 if state == \"list\":", "auth_password = <PASSWORD>.params['<PASSWORD>'] state = module.params['state'] data = {} for", "== \"present\": response = api.add_or_update(**data) if state == \"absent\": response", "state: absent ''' import json, requests, os class KongAPI: def", "url = self.__url(path) return requests.delete(url, auth=self.auth) class ModuleHelper: def __init__(self,", "value = module.params.get(field, None) if value is not None: data[field]", "method = \"patch\" url = \"{}{}\" . 
format (url, name)", "self.__url(\"/apis/\") api_list = self.list().json().get(\"data\", []) api_exists = self._api_exists(name, api_list) if", "kong_admin_username and kong_admin_password\", meta=response.json()) else: has_changed, meta = helper.get_response(response, state)", "upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False): method = \"post\" url =", "'name', 'upstream_url', 'request_host', 'request_path', 'strip_request_path', 'preserve_host' ] helper = ModuleHelper(fields)", "dict(required=False, type='str'), name = dict(required=False, type='str'), upstream_url = dict(required=False, type='str'),", "helper.get_response(response, state) module.exit_json(changed=has_changed, meta=meta) from ansible.module_utils.basic import * from ansible.module_utils.urls", "for field in self.fields: value = module.params.get(field, None) if value", "request_host=None, request_path=None, strip_request_path=False, preserve_host=False): method = \"post\" url = self.__url(\"/apis/\")", "(has_changed, meta) def main(): fields = [ 'name', 'upstream_url', 'request_host',", "auth_password) if state == \"present\": response = api.add_or_update(**data) if state", "need this module = helper.get_module() base_url, data, state, auth_user, auth_password", "url = \"{}{}\" . 
format (url, name) data = {", "api in api_list: if name == api.get(\"name\", None): return True", "403: module.fail_json(msg=\"Please check kong_admin_username and kong_admin_password\", meta=response.json()) else: has_changed, meta", "== \"list\": response = api.list() if response.status_code == 401: module.fail_json(msg=\"Please", "AnsibleModule(argument_spec=args,supports_check_mode=False) def prepare_inputs(self, module): url = module.params['kong_admin_uri'] auth_user = module.params['kong_admin_username']", "specify kong_admin_username and kong_admin_password\", meta=response.json()) elif response.status_code == 403: module.fail_json(msg=\"Please", "if request_host is not None: data['request_host'] = request_host if request_path", "def list(self): url = self.__url(\"/apis\") return requests.get(url, auth=self.auth) def info(self,", "kong short_description: Configure a Kong API Gateway ''' EXAMPLES =", "http://127.0.0.1:8001/apis/ name: \"Mockbin\" taget_url: \"http://mockbin.com\" request_host: \"mockbin.com\" state: present -", "{} for field in self.fields: value = module.params.get(field, None) if", "if state == \"list\": response = api.list() if response.status_code ==", "'request_path', 'strip_request_path', 'preserve_host' ] helper = ModuleHelper(fields) global module #", "api.add_or_update(**data) if state == \"absent\": response = api.delete_by_name(data.get(\"name\")) if state", "response.json() has_changed = False return (has_changed, meta) def main(): fields", "\"Mockbin\" state: absent ''' import json, requests, os class KongAPI:", "fields = [ 'name', 'upstream_url', 'request_host', 'request_path', 'strip_request_path', 'preserve_host' ]", "meta=response.json()) else: has_changed, meta = helper.get_response(response, state) module.exit_json(changed=has_changed, meta=meta) from", "path = \"/apis/{}\" . 
format (id) url = self.__url(path) return", "= dict(required=False, type='str'), request_path = dict(required=False, type='str'), strip_request_path = dict(required=False,", "if request_path is not None: data['request_path'] = request_path return getattr(requests,", "\"{}{}\" . format (self.base_url, path) def _api_exists(self, name, api_list): for", "Register a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" taget_url: \"http://mockbin.com\"", "module # might not need this module = helper.get_module() base_url,", "KongAPI(base_url, auth_user, auth_password) if state == \"present\": response = api.add_or_update(**data)", "api.list() if response.status_code == 401: module.fail_json(msg=\"Please specify kong_admin_username and kong_admin_password\",", "'preserve_host' ] helper = ModuleHelper(fields) global module # might not", "= helper.prepare_inputs(module) api = KongAPI(base_url, auth_user, auth_password) if state ==", "import json, requests, os class KongAPI: def __init__(self, base_url, auth_username=None,", "kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\" state: absent ''' import json,", "id): url = self.__url(\"/apis/{}\" . 
format (id)) return requests.get(url, auth=self.auth)", "auth_password) def get_response(self, response, state): if state == \"present\": meta", "== \"present\": meta = response.json() has_changed = response.status_code in [201,", "dict(required=False, type='str'), request_path = dict(required=False, type='str'), strip_request_path = dict(required=False, default=False,", "field in self.fields: value = module.params.get(field, None) if value is", "''' EXAMPLES = ''' - name: Register a site kong:", "state = dict(required=False, default=\"present\", choices=['present', 'absent', 'latest', 'list', 'info'], type='str'),", "not need this module = helper.get_module() base_url, data, state, auth_user,", "kong_admin_password\", meta=response.json()) elif response.status_code == 403: module.fail_json(msg=\"Please check kong_admin_username and", "not None: data['request_path'] = request_path return getattr(requests, method)(url, data, auth=self.auth)", "<reponame>sebastienc/ansible-kong-module<filename>library/kong_api.py<gh_stars>10-100 #!/usr/bin/python DOCUMENTATION = ''' --- module: kong short_description: Configure", "\"mockbin.com\" state: present - name: Delete a site kong: kong_admin_uri:", "module.exit_json(changed=has_changed, meta=meta) from ansible.module_utils.basic import * from ansible.module_utils.urls import *", "main(): fields = [ 'name', 'upstream_url', 'request_host', 'request_path', 'strip_request_path', 'preserve_host'", "response.status_code == 403: module.fail_json(msg=\"Please check kong_admin_username and kong_admin_password\", meta=response.json()) else:", "def prepare_inputs(self, module): url = module.params['kong_admin_uri'] auth_user = module.params['kong_admin_username'] auth_password", "url = module.params['kong_admin_uri'] auth_user = module.params['kong_admin_username'] auth_password = <PASSWORD>.params['<PASSWORD>'] state", "type='bool'), state = dict(required=False, default=\"present\", choices=['present', 'absent', 'latest', 'list', 
'info'],", "meta=meta) from ansible.module_utils.basic import * from ansible.module_utils.urls import * if", "auth_username=None, auth_password=<PASSWORD>): self.base_url = base_url if auth_username is not None", "dict(required=False, default=\"present\", choices=['present', 'absent', 'latest', 'list', 'info'], type='str'), ) return", "type='str'), name = dict(required=False, type='str'), upstream_url = dict(required=False, type='str'), request_host", "if state == \"present\": response = api.add_or_update(**data) if state ==", "request_host = dict(required=False, type='str'), request_path = dict(required=False, type='str'), strip_request_path =", "meta) def main(): fields = [ 'name', 'upstream_url', 'request_host', 'request_path',", "= response.json() has_changed = False return (has_changed, meta) def main():", "= \"patch\" url = \"{}{}\" . format (url, name) data", "(self.base_url, path) def _api_exists(self, name, api_list): for api in api_list:", "api = KongAPI(base_url, auth_user, auth_password) if state == \"present\": response", "def __init__(self, base_url, auth_username=None, auth_password=<PASSWORD>): self.base_url = base_url if auth_username", "is not None: data['request_host'] = request_host if request_path is not", "and auth_password is not None: self.auth = (auth_username, auth_password) else:", "class KongAPI: def __init__(self, base_url, auth_username=None, auth_password=<PASSWORD>): self.base_url = base_url", "self._api_exists(name, api_list) if api_exists: method = \"patch\" url = \"{}{}\"", "name, \"upstream_url\": upstream_url, \"strip_request_path\": strip_request_path, \"preserve_host\": preserve_host } if request_host", "None: data['request_host'] = request_host if request_path is not None: data['request_path']", "= dict(required=False, default=False, type='bool'), preserve_host = dict(required=False, default=False, type='bool'), state", "200] if state == \"absent\": meta = {} has_changed =", "default=False, type='bool'), preserve_host = 
dict(required=False, default=False, type='bool'), state = dict(required=False,", "strip_request_path=False, preserve_host=False): method = \"post\" url = self.__url(\"/apis/\") api_list =", "= dict( kong_admin_uri = dict(required=False, type='str'), kong_admin_username = dict(required=False, type='str'),", "401: module.fail_json(msg=\"Please specify kong_admin_username and kong_admin_password\", meta=response.json()) elif response.status_code ==", "__init__(self, fields): self.fields = fields def get_module(self): args = dict(", "name: \"Mockbin\" state: absent ''' import json, requests, os class", "and kong_admin_password\", meta=response.json()) elif response.status_code == 403: module.fail_json(msg=\"Please check kong_admin_username", "- name: Delete a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/ name: \"Mockbin\"", "ModuleHelper(fields) global module # might not need this module =", "False def add_or_update(self, name, upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False): method", "return requests.get(url, auth=self.auth) def delete_by_name(self, name): info = self.info(name) id", "\"absent\": meta = {} has_changed = response.status_code == 204 if", "= ''' - name: Register a site kong: kong_admin_uri: http://127.0.0.1:8001/apis/", "not None and auth_password is not None: self.auth = (auth_username,", "= ''' --- module: kong short_description: Configure a Kong API", "= module.params.get(field, None) if value is not None: data[field] =", ". format (self.base_url, path) def _api_exists(self, name, api_list): for api", ") return AnsibleModule(argument_spec=args,supports_check_mode=False) def prepare_inputs(self, module): url = module.params['kong_admin_uri'] auth_user", "# might not need this module = helper.get_module() base_url, data," ]
[ "y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points:", "= self.plotter.add(self.line.end) else: points = self.clip() if points: p0, p1", "linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist =", "Artist from matplotlib.lines import Line2D from compas.geometry import intersection_line_box_xy __all__", "= points x0, y0 = p0[:2] x1, y1 = p1[:2]", "self._mpl_line = None self._start_artist = None self._end_artist = None self._segment_artist", "y1 = self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else:", "= self.line.end[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle,", "compas_plotters.artists import Artist from matplotlib.lines import Line2D from compas.geometry import", "self.line.end[:2]] def draw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1,", "[xmax, ymax], [xmin, ymax]] return intersection_line_box_xy(self.line, box) @property def data(self):", "ymax], [xmin, ymax]] return intersection_line_box_xy(self.line, box) @property def data(self): return", "self._start_artist = None self._end_artist = None self._segment_artist = None self._draw_points", "points = self.clip() if points: p0, p1 = points x0,", "import intersection_line_box_xy __all__ = ['LineArtist'] class LineArtist(Artist): \"\"\"\"\"\" zorder =", "= p0[:2] x1, y1 = p1[:2] line2d = Line2D([x0, x1],", "= self.plotter.add(self.line.end) def redraw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2]", "y1 = self.line.end[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth,", "def __init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 
0, 0)):", "self.linestyle = linestyle self.color = color def clip(self): xlim, ylim", "box = [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]]", "box) @property def data(self): return [self.line.start[:2], self.line.end[:2]] def draw(self): if", "color=(0, 0, 0)): super(LineArtist, self).__init__(line) self._mpl_line = None self._start_artist =", "= None self._start_artist = None self._end_artist = None self._segment_artist =", "Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line =", "xmax = xlim ymin, ymax = ylim box = [[xmin,", "from matplotlib.lines import Line2D from compas.geometry import intersection_line_box_xy __all__ =", "xlim, ylim = self.plotter.viewbox xmin, xmax = xlim ymin, ymax", "ymin, ymax = ylim box = [[xmin, ymin], [xmax, ymin],", "ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]] return intersection_line_box_xy(self.line, box)", "None self._start_artist = None self._end_artist = None self._segment_artist = None", "draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 0, 0)): super(LineArtist, self).__init__(line) self._mpl_line =", "x1, y1 = p1[:2] line2d = Line2D([x0, x1], [y0, y1],", "self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) def redraw(self): if self._draw_as_segment: x0, y0", "ylim box = [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin,", "if points: p0, p1 = points x0, y0 = p0[:2]", "class LineArtist(Artist): \"\"\"\"\"\" zorder = 1000 def __init__(self, line, draw_points=False,", "LineArtist(Artist): \"\"\"\"\"\" zorder = 1000 def __init__(self, line, draw_points=False, draw_as_segment=False,", "self.line.start[:2] x1, y1 = self.line.end[:2] line2d = Line2D([x0, x1], [y0,", "self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist =", "y0 = p0[:2] x1, y1 = p1[:2] line2d = Line2D([x0,", "y0 = self.line.start[:2] 
x1, y1 = self.line.end[:2] line2d = Line2D([x0,", "= self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points", "linestyle='solid', color=(0, 0, 0)): super(LineArtist, self).__init__(line) self._mpl_line = None self._start_artist", "self._segment_artist = None self._draw_points = draw_points self._draw_as_segment = draw_as_segment self.line", "self.color = color def clip(self): xlim, ylim = self.plotter.viewbox xmin,", "y1 = p1[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth,", "self.plotter.add(self.line.end) def redraw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1,", "self.linewidth = linewidth self.linestyle = linestyle self.color = color def", "draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 0, 0)): super(LineArtist, self).__init__(line) self._mpl_line", "self.line.end[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color,", "x1, y1 = self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth)", "<reponame>XingxinHE/compas<filename>src/compas_plotters/artists/lineartist.py from compas_plotters.artists import Artist from matplotlib.lines import Line2D from", "self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) else:", "self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) else: points =", "linewidth=1.0, linestyle='solid', color=(0, 0, 0)): super(LineArtist, self).__init__(line) self._mpl_line = None", "linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = 
self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist", "p0[:2] x1, y1 = p1[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color)", "0, 0)): super(LineArtist, self).__init__(line) self._mpl_line = None self._start_artist = None", "[xmax, ymin], [xmax, ymax], [xmin, ymax]] return intersection_line_box_xy(self.line, box) @property", "draw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 =", "p0, p1 = points x0, y0 = p0[:2] x1, y1", "self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) else: points = self.clip()", "= self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) def redraw(self): if self._draw_as_segment: x0,", "return intersection_line_box_xy(self.line, box) @property def data(self): return [self.line.start[:2], self.line.end[:2]] def", "__init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 0, 0)): super(LineArtist,", "= p1[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle,", "self._end_artist = None self._segment_artist = None self._draw_points = draw_points self._draw_as_segment", "= draw_points self._draw_as_segment = draw_as_segment self.line = line self.linewidth =", "draw_points self._draw_as_segment = draw_as_segment self.line = line self.linewidth = linewidth", "p1[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color,", "x1, y1 = p1[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth)", "import Line2D from compas.geometry import intersection_line_box_xy __all__ = ['LineArtist'] class", "= ['LineArtist'] class LineArtist(Artist): \"\"\"\"\"\" zorder = 1000 def __init__(self,", "zorder=self.zorder) self._mpl_line = 
self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist", "p1 = points x0, y0 = p0[:2] x1, y1 =", "None self._end_artist = None self._segment_artist = None self._draw_points = draw_points", "p0[:2] x1, y1 = p1[:2] line2d = Line2D([x0, x1], [y0,", "super(LineArtist, self).__init__(line) self._mpl_line = None self._start_artist = None self._end_artist =", "def draw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1", "= self.line.start[:2] x1, y1 = self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1])", "self.plotter.viewbox xmin, xmax = xlim ymin, ymax = ylim box", "x0, y0 = p0[:2] x1, y1 = p1[:2] self._mpl_line.set_xdata([x0, x1])", "xmin, xmax = xlim ymin, ymax = ylim box =", "self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) def redraw(self): if self._draw_as_segment:", "color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start)", "= line self.linewidth = linewidth self.linestyle = linestyle self.color =", "[xmin, ymax]] return intersection_line_box_xy(self.line, box) @property def data(self): return [self.line.start[:2],", "compas.geometry import intersection_line_box_xy __all__ = ['LineArtist'] class LineArtist(Artist): \"\"\"\"\"\" zorder", "import Artist from matplotlib.lines import Line2D from compas.geometry import intersection_line_box_xy", "matplotlib.lines import Line2D from compas.geometry import intersection_line_box_xy __all__ = ['LineArtist']", "= color def clip(self): xlim, ylim = self.plotter.viewbox xmin, xmax", "= linestyle self.color = color def clip(self): xlim, ylim =", "1000 def __init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 0,", "ymin], [xmax, ymax], [xmin, ymax]] return 
intersection_line_box_xy(self.line, box) @property def", "data(self): return [self.line.start[:2], self.line.end[:2]] def draw(self): if self._draw_as_segment: x0, y0", "self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points =", "def data(self): return [self.line.start[:2], self.line.end[:2]] def draw(self): if self._draw_as_segment: x0,", "xlim ymin, ymax = ylim box = [[xmin, ymin], [xmax,", "from compas.geometry import intersection_line_box_xy __all__ = ['LineArtist'] class LineArtist(Artist): \"\"\"\"\"\"", "0)): super(LineArtist, self).__init__(line) self._mpl_line = None self._start_artist = None self._end_artist", "= self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) else: points = self.clip() if", "None self._draw_points = draw_points self._draw_as_segment = draw_as_segment self.line = line", "zorder = 1000 def __init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid',", "linestyle self.color = color def clip(self): xlim, ylim = self.plotter.viewbox", "x1, y1 = self.line.end[:2] line2d = Line2D([x0, x1], [y0, y1],", "\"\"\"\"\"\" zorder = 1000 def __init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0,", "return [self.line.start[:2], self.line.end[:2]] def draw(self): if self._draw_as_segment: x0, y0 =", "x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points = self.clip() if", "self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points = self.clip() if points:", "= Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line", "points x0, y0 = p0[:2] x1, y1 = p1[:2] self._mpl_line.set_xdata([x0,", "@property def 
data(self): return [self.line.start[:2], self.line.end[:2]] def draw(self): if self._draw_as_segment:", "self.line = line self.linewidth = linewidth self.linestyle = linestyle self.color", "from compas_plotters.artists import Artist from matplotlib.lines import Line2D from compas.geometry", "= 1000 def __init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0,", "linewidth self.linestyle = linestyle self.color = color def clip(self): xlim,", "self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) def", "self.clip() if points: p0, p1 = points x0, y0 =", "self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points = self.clip()", "self.plotter.add(self.line.end) else: points = self.clip() if points: p0, p1 =", "[y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if", "= None self._segment_artist = None self._draw_points = draw_points self._draw_as_segment =", "self._draw_as_segment = draw_as_segment self.line = line self.linewidth = linewidth self.linestyle", "line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder)", "x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d)", "intersection_line_box_xy __all__ = ['LineArtist'] class LineArtist(Artist): \"\"\"\"\"\" zorder = 1000", "ylim = self.plotter.viewbox xmin, xmax = xlim ymin, ymax =", "ymax = ylim box = [[xmin, ymin], [xmax, ymin], [xmax,", "self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] self._mpl_line.set_xdata([x0,", "line self.linewidth = linewidth 
self.linestyle = linestyle self.color = color", "= ylim box = [[xmin, ymin], [xmax, ymin], [xmax, ymax],", "= self.plotter.viewbox xmin, xmax = xlim ymin, ymax = ylim", "clip(self): xlim, ylim = self.plotter.viewbox xmin, xmax = xlim ymin,", "= self.clip() if points: p0, p1 = points x0, y0", "['LineArtist'] class LineArtist(Artist): \"\"\"\"\"\" zorder = 1000 def __init__(self, line,", "Line2D from compas.geometry import intersection_line_box_xy __all__ = ['LineArtist'] class LineArtist(Artist):", "self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] line2d", "self.line.start[:2] x1, y1 = self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color)", "points x0, y0 = p0[:2] x1, y1 = p1[:2] line2d", "self).__init__(line) self._mpl_line = None self._start_artist = None self._end_artist = None", "def clip(self): xlim, ylim = self.plotter.viewbox xmin, xmax = xlim", "None self._segment_artist = None self._draw_points = draw_points self._draw_as_segment = draw_as_segment", "self._mpl_line.set_linewidth(self.linewidth) else: points = self.clip() if points: p0, p1 =", "y0 = self.line.start[:2] x1, y1 = self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0,", "y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points = self.clip() if points: p0,", "self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points = self.clip() if points: p0, p1", "= [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]] return", "= None self._draw_points = draw_points self._draw_as_segment = draw_as_segment self.line =", "points: p0, p1 = points x0, y0 = p0[:2] x1,", "self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) def redraw(self): if", "= None self._end_artist = None self._segment_artist = None self._draw_points =", "= 
linewidth self.linestyle = linestyle self.color = color def clip(self):", "color def clip(self): xlim, ylim = self.plotter.viewbox xmin, xmax =", "def redraw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1", "x0, y0 = p0[:2] x1, y1 = p1[:2] line2d =", "= p0[:2] x1, y1 = p1[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1])", "ymax]] return intersection_line_box_xy(self.line, box) @property def data(self): return [self.line.start[:2], self.line.end[:2]]", "else: points = self.clip() if points: p0, p1 = points", "= draw_as_segment self.line = line self.linewidth = linewidth self.linestyle =", "intersection_line_box_xy(self.line, box) @property def data(self): return [self.line.start[:2], self.line.end[:2]] def draw(self):", "__all__ = ['LineArtist'] class LineArtist(Artist): \"\"\"\"\"\" zorder = 1000 def", "= xlim ymin, ymax = ylim box = [[xmin, ymin],", "[self.line.start[:2], self.line.end[:2]] def draw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2]", "if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2]", "if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) def redraw(self):", "y0 = p0[:2] x1, y1 = p1[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0,", "self._draw_points = draw_points self._draw_as_segment = draw_as_segment self.line = line self.linewidth", "self._end_artist = self.plotter.add(self.line.end) else: points = self.clip() if points: p0,", "self._end_artist = self.plotter.add(self.line.end) def redraw(self): if self._draw_as_segment: x0, y0 =", "x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] self._mpl_line.set_xdata([x0, x1])", "[[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]] return intersection_line_box_xy(self.line,", "draw_as_segment self.line = line self.linewidth = linewidth self.linestyle = linestyle", "if self._draw_points: self._start_artist = 
self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) else: points", "line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 0, 0)): super(LineArtist, self).__init__(line)", "= self.line.start[:2] x1, y1 = self.line.end[:2] line2d = Line2D([x0, x1],", "= self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end)", "x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] line2d =", "redraw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 =", "self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) else: points = self.clip() if points:" ]
[ "from lvq.utils import plot2d def main(): # Load dataset dataset", "import numpy as np from sklearn.metrics import accuracy_score from sklearn.model_selection", "y_train, epochs=30) # Predict the response for test dataset y_predict", ":-1].astype('float64') y = dataset[:, -1].astype('int64') # Split dataset into training", "sklearn.model_selection import train_test_split from lvq import SilvqModel from lvq.utils import", "model.fit(x_train, y_train, epochs=30) # Predict the response for test dataset", "into training set and test set x_train, x_test, y_train, y_test", "# Evaluating the model print('Accuracy: %.3f' %accuracy_score(y_test, y_predict)) # Plot", "model model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls') # Training the model", "from sklearn.model_selection import train_test_split from lvq import SilvqModel from lvq.utils", "set x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=3,", "prediction results and prototypes plot2d(model, x, y, title='Artificial dataset1') if", "y, test_size=0.2, random_state=3, shuffle=True, stratify=y) # Generating model model =", "prototypes plot2d(model, x, y, title='Artificial dataset1') if __name__ == '__main__':", "as np from sklearn.metrics import accuracy_score from sklearn.model_selection import train_test_split", "%.3f' %accuracy_score(y_test, y_predict)) # Plot prediction results and prototypes plot2d(model,", "bias_type='ls') # Training the model model.fit(x_train, y_train, epochs=30) # Predict", "model.predict(x_test) # Evaluating the model print('Accuracy: %.3f' %accuracy_score(y_test, y_predict)) #", "# -*- encoding: utf8 -*- import numpy as np from", "= SilvqModel(x.shape[1], theta=0.8, bias_type='ls') # Training the model model.fit(x_train, y_train,", "numpy as np from sklearn.metrics import accuracy_score from sklearn.model_selection import", "Generating model model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls') # Training the", "stratify=y) # Generating model 
model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls') #", "lvq.utils import plot2d def main(): # Load dataset dataset =", "np from sklearn.metrics import accuracy_score from sklearn.model_selection import train_test_split from", "dataset[:, :-1].astype('float64') y = dataset[:, -1].astype('int64') # Split dataset into", "Training the model model.fit(x_train, y_train, epochs=30) # Predict the response", "accuracy_score from sklearn.model_selection import train_test_split from lvq import SilvqModel from", "plot2d def main(): # Load dataset dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',')", "import SilvqModel from lvq.utils import plot2d def main(): # Load", "= np.loadtxt('data/artificial_dataset1.csv', delimiter=',') x = dataset[:, :-1].astype('float64') y = dataset[:,", "# Training the model model.fit(x_train, y_train, epochs=30) # Predict the", "the model print('Accuracy: %.3f' %accuracy_score(y_test, y_predict)) # Plot prediction results", "y = dataset[:, -1].astype('int64') # Split dataset into training set", "shuffle=True, stratify=y) # Generating model model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls')", "train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y) # Generating model model", "delimiter=',') x = dataset[:, :-1].astype('float64') y = dataset[:, -1].astype('int64') #", "model print('Accuracy: %.3f' %accuracy_score(y_test, y_predict)) # Plot prediction results and", "Plot prediction results and prototypes plot2d(model, x, y, title='Artificial dataset1')", "= train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y) # Generating model", "random_state=3, shuffle=True, stratify=y) # Generating model model = SilvqModel(x.shape[1], theta=0.8,", "import plot2d def main(): # Load dataset dataset = np.loadtxt('data/artificial_dataset1.csv',", "results and prototypes plot2d(model, x, y, title='Artificial dataset1') if __name__", "-*- import numpy as np from sklearn.metrics import 
accuracy_score from", "def main(): # Load dataset dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',') x", "dataset[:, -1].astype('int64') # Split dataset into training set and test", "training set and test set x_train, x_test, y_train, y_test =", "from lvq import SilvqModel from lvq.utils import plot2d def main():", "# Split dataset into training set and test set x_train,", "theta=0.8, bias_type='ls') # Training the model model.fit(x_train, y_train, epochs=30) #", "x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y)", "the response for test dataset y_predict = model.predict(x_test) # Evaluating", "x = dataset[:, :-1].astype('float64') y = dataset[:, -1].astype('int64') # Split", "response for test dataset y_predict = model.predict(x_test) # Evaluating the", "= model.predict(x_test) # Evaluating the model print('Accuracy: %.3f' %accuracy_score(y_test, y_predict))", "sklearn.metrics import accuracy_score from sklearn.model_selection import train_test_split from lvq import", "Load dataset dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',') x = dataset[:, :-1].astype('float64')", "# Predict the response for test dataset y_predict = model.predict(x_test)", "from sklearn.metrics import accuracy_score from sklearn.model_selection import train_test_split from lvq", "dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',') x = dataset[:, :-1].astype('float64') y =", "y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y) #", "utf8 -*- import numpy as np from sklearn.metrics import accuracy_score", "lvq import SilvqModel from lvq.utils import plot2d def main(): #", "y_predict)) # Plot prediction results and prototypes plot2d(model, x, y,", "epochs=30) # Predict the response for test dataset y_predict =", "y_predict = model.predict(x_test) # Evaluating the model print('Accuracy: %.3f' %accuracy_score(y_test,", "plot2d(model, x, y, 
title='Artificial dataset1') if __name__ == '__main__': main()", "model model.fit(x_train, y_train, epochs=30) # Predict the response for test", "-*- encoding: utf8 -*- import numpy as np from sklearn.metrics", "Evaluating the model print('Accuracy: %.3f' %accuracy_score(y_test, y_predict)) # Plot prediction", "np.loadtxt('data/artificial_dataset1.csv', delimiter=',') x = dataset[:, :-1].astype('float64') y = dataset[:, -1].astype('int64')", "# Plot prediction results and prototypes plot2d(model, x, y, title='Artificial", "and test set x_train, x_test, y_train, y_test = train_test_split(x, y,", "y_test = train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y) # Generating", "test dataset y_predict = model.predict(x_test) # Evaluating the model print('Accuracy:", "# Load dataset dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',') x = dataset[:,", "-1].astype('int64') # Split dataset into training set and test set", "import accuracy_score from sklearn.model_selection import train_test_split from lvq import SilvqModel", "test set x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2,", "model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls') # Training the model model.fit(x_train,", "Predict the response for test dataset y_predict = model.predict(x_test) #", "import train_test_split from lvq import SilvqModel from lvq.utils import plot2d", "main(): # Load dataset dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',') x =", "the model model.fit(x_train, y_train, epochs=30) # Predict the response for", "set and test set x_train, x_test, y_train, y_test = train_test_split(x,", "train_test_split from lvq import SilvqModel from lvq.utils import plot2d def", "%accuracy_score(y_test, y_predict)) # Plot prediction results and prototypes plot2d(model, x,", "dataset into training set and test set x_train, x_test, y_train,", "encoding: utf8 -*- import numpy as np from sklearn.metrics import", "# Generating 
model model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls') # Training", "test_size=0.2, random_state=3, shuffle=True, stratify=y) # Generating model model = SilvqModel(x.shape[1],", "= dataset[:, :-1].astype('float64') y = dataset[:, -1].astype('int64') # Split dataset", "= dataset[:, -1].astype('int64') # Split dataset into training set and", "x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True,", "dataset y_predict = model.predict(x_test) # Evaluating the model print('Accuracy: %.3f'", "and prototypes plot2d(model, x, y, title='Artificial dataset1') if __name__ ==", "Split dataset into training set and test set x_train, x_test,", "for test dataset y_predict = model.predict(x_test) # Evaluating the model", "SilvqModel from lvq.utils import plot2d def main(): # Load dataset", "dataset dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',') x = dataset[:, :-1].astype('float64') y", "print('Accuracy: %.3f' %accuracy_score(y_test, y_predict)) # Plot prediction results and prototypes", "SilvqModel(x.shape[1], theta=0.8, bias_type='ls') # Training the model model.fit(x_train, y_train, epochs=30)" ]
[ "base model from the pre-trained model. Resnet 50 in this", "save and load import os # In[ ]: #some time", "for layer in base_model.layers[:fine_tune_at]: layer.trainable = False # In[ ]:", "8)) plt.subplot(2, 1, 1) plt.plot(acc, label='Training Accuracy') plt.plot(val_acc, label='Validation Accuracy')", "tf versions from tensorflow.python.platform import build_info as tf_build_info print(tf_build_info.cuda_version_number) #", "target_size=(IMG_HEIGHT, IMG_WIDTH), classes = list(CLASS_NAMES), class_mode='categorical' ) val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir),", "# In[ ]: IMG_SIZE = 224 IMG_SHAPE = (IMG_SIZE, IMG_SIZE,", "]: #To apply the model on new data new_model =", "Loss') plt.plot(epochs_range, val_loss, label='Validation Loss') plt.legend(loc='upper right') plt.title('Training and Validation", "# In[ ]: import tensorflow as tf import pathlib from", "new classification layer x = base_model.output x = tf.keras.layers.GlobalAveragePooling2D()(x) x", "acc, label='Training Accuracy') plt.plot(epochs_range, val_acc, label='Validation Accuracy') plt.legend(loc='lower right') plt.title('Training", "for our validation data train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT,", "and Validation Accuracy') plt.subplot(2, 1, 2) plt.plot(loss, label='Training Loss') plt.plot(val_loss,", "# 7 in v1.10.0 # In[ ]: import tensorflow as", "num_classes = len(CLASS_NAMES) #23 # In[ ]: #We use image", "layers are in the base model print(\"Layers base model: \",", "initial_epochs = 7 fine_tune_epochs = 3 total_epochs = initial_epochs +", "import image #image directory containing images to test img_dir=\"\\\\polyps\" for", "]: IMG_SIZE = 224 IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3) #", "plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.xlabel('epoch') plt.show() # In[", "tensorflow.keras.preprocessing.image 
import ImageDataGenerator import os import numpy as np import", "list(CLASS_NAMES) ) #get class order from directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) #", "#get class order from directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) # In[ ]:", "date and time. now = datetime.now() timestamp = datetime.timestamp(now) print(\"timestamp", "cuda and tf versions from tensorflow.python.platform import build_info as tf_build_info", "# In[ ]: tf.__version__ # In[ ]: #Train and test", "tf.keras.models.Model(inputs=base_model.input, outputs=x) base_learning_rate = 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy', metrics=['accuracy']) # In[", "images are there image_count = len(list(train_data_dir.glob('*/*.jpg'))) image_count # In[ ]:", "timestamp) # In[ ]: mode_filename = str(timestamp)+'mymodel.h5' model.save(model_filename) # In[", "from directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) # In[ ]: IMG_SIZE = 224", "print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) # In[ ]: IMG_SIZE = 224 IMG_SHAPE =", "img_dir=\"\\\\polyps\" for i,img in enumerate(os.listdir(img_dir)): tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE)) tmpimage", "plt.plot(epochs_range, val_acc, label='Validation Accuracy') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy')", "False # In[ ]: model.compile(loss='categorical_crossentropy', optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) #", "plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='lower right') plt.title('Training and Validation", "tf.keras.layers.Dense(num_classes,activation='softmax')(x) model = tf.keras.models.Model(inputs=base_model.input, outputs=x) 
base_learning_rate = 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy',", "model # In[ ]: # How many layers are in", "#fit the model history = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size,", "]: #Train and test data folder train_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\" test_data_dir", "base_model.trainable = False # In[ ]: #add new classification layer", "= history.history['val_accuracy'] loss = history.history['loss'] val_loss = history.history['val_loss'] epochs_range =", "100 # Freeze all the layers before the fine tune", "fine_tune_at = 100 # Freeze all the layers before the", "prepare them for the training train_image_generator = ImageDataGenerator() # Generator", "plt.figure(figsize=(8, 8)) plt.subplot(1, 2, 1) plt.plot(epochs_range, acc, label='Training Accuracy') plt.plot(epochs_range,", "acc = history.history['accuracy'] val_acc = history.history['val_accuracy'] loss = history.history['loss'] val_loss", "]: acc += history_fine.history['accuracy'] val_acc += history_fine.history['val_accuracy'] loss += history_fine.history['loss']", "plt.subplot(2, 1, 2) plt.plot(loss, label='Training Loss') plt.plot(val_loss, label='Validation Loss') plt.ylim([0,", "In[ ]: #Train and test data folder train_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\"", "weights='imagenet') base_model.trainable = False # In[ ]: #add new classification", "]: #model save and load import os # In[ ]:", "= 224 IMG_WIDTH = 224 STEPS_PER_EPOCH = np.ceil(image_count/batch_size) epochs =", "= 224 IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3) # base model", "# 9.0 in v1.10.0 print(tf_build_info.cudnn_version_number) # 7 in v1.10.0 #", "import numpy as np import matplotlib.pyplot as plt # In[", "data generators to load the images and prepare them for", "from __future__ import absolute_import, division, print_function, unicode_literals # In[ ]:", "ImageDataGenerator import os import numpy as np 
import matplotlib.pyplot as", "= len(list(train_data_dir.glob('*/*.jpg'))) image_count # In[ ]: total_train = len(list(train_data_dir.glob('*/*.jpg'))) total_val", "= len(CLASS_NAMES) #23 # In[ ]: #We use image data", "In[ ]: AUTOTUNE = tf.data.experimental.AUTOTUNE # In[ ]: import IPython.display", "generators to load the images and prepare them for the", "= (IMG_SIZE, IMG_SIZE, 3) # base model from the pre-trained", "]: #some time stamp from datetime import datetime # current", "# Generator for our validation data train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size,", "]: #Define parameter for training batch_size = 32 IMG_HEIGHT =", "import tensorflow as tf import pathlib from tensorflow.keras.models import Sequential", "metrics=['accuracy']) # In[ ]: model.summary() # In[ ]: #Fine tune", "IMG_SIZE, 3) # base model from the pre-trained model. Resnet", "the model architecture new_model.summary() # In[ ]: from tensorflow.keras.preprocessing import", "__future__ import absolute_import, division, print_function, unicode_literals # In[ ]: #Checking", "# In[ ]: #Train and test data folder train_data_dir =", "matplotlib.pyplot as plt import os # In[ ]: tf.__version__ #", "# In[ ]: #fit the model history = model.fit_generator( train_data_gen,", "= datetime.timestamp(now) print(\"timestamp =\", timestamp) # In[ ]: mode_filename =", "class_mode='categorical', classes = list(CLASS_NAMES) ) #get class order from directories", "base_model.trainable = True #now we want to train the base", "right') plt.title('Training and Validation Accuracy') plt.subplot(1, 2, 2) plt.plot(epochs_range, loss,", "import datetime # current date and time. 
now = datetime.now()", "total_train = len(list(train_data_dir.glob('*/*.jpg'))) total_val = len(list(test_data_dir.glob('*/*.jpg'))) # In[ ]: #get", "Generator for our validation data train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True,", "test_data_dir = pathlib.Path(test_data_dir) # In[ ]: #count how many images", "val_acc = history.history['val_accuracy'] loss = history.history['loss'] val_loss = history.history['val_loss'] epochs_range", "# In[ ]: #count how many images are there image_count", "our training data validation_image_generator = ImageDataGenerator() # Generator for our", "1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='upper right') plt.title('Training and", "In[ ]: IMG_SIZE = 224 IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3)", "2, 2) plt.plot(epochs_range, loss, label='Training Loss') plt.plot(epochs_range, val_loss, label='Validation Loss')", "architecture new_model.summary() # In[ ]: from tensorflow.keras.preprocessing import image #image", "]: import tensorflow as tf import pathlib from tensorflow.keras.models import", "val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical', classes =", "In[ ]: model.compile(loss='categorical_crossentropy', optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) # In[ ]:", "mode_filename = str(timestamp)+'mymodel.h5' model.save(model_filename) # In[ ]: #To apply the", "!= \"LICENSE.txt\"]) CLASS_NAMES # In[ ]: #Define parameter for training", "+= history_fine.history['val_accuracy'] loss += history_fine.history['loss'] val_loss += history_fine.history['val_loss'] # In[", "In[ ]: import IPython.display as display from PIL import Image", "label='Training Loss') plt.plot(epochs_range, val_loss, label='Validation Loss') 
plt.legend(loc='upper right') plt.title('Training and", "images and prepare them for the training train_image_generator = ImageDataGenerator()", "(IMG_SIZE, IMG_SIZE, 3) # base model from the pre-trained model.", "]: #add new classification layer x = base_model.output x =", "batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical', classes = list(CLASS_NAMES) ) #get", "In[ ]: base_model.trainable = True #now we want to train", "the fine tune starting layer for layer in base_model.layers[:fine_tune_at]: layer.trainable", "versions from tensorflow.python.platform import build_info as tf_build_info print(tf_build_info.cuda_version_number) # 9.0", "base_model.layers[:fine_tune_at]: layer.trainable = False # In[ ]: model.compile(loss='categorical_crossentropy', optimizer =", "val_loss = history.history['val_loss'] epochs_range = range(epochs) plt.figure(figsize=(8, 8)) plt.subplot(1, 2,", "= 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy', metrics=['accuracy']) # In[ ]: #fit the", "True #now we want to train the base model #", "from tensorflow.python.platform import build_info as tf_build_info print(tf_build_info.cuda_version_number) # 9.0 in", "as plt # In[ ]: AUTOTUNE = tf.data.experimental.AUTOTUNE # In[", "model.save(model_filename) # In[ ]: #To apply the model on new", "initial_epochs + fine_tune_epochs train_batches = total_train // batch_size print(total_val //", "image_count = len(list(train_data_dir.glob('*/*.jpg'))) image_count # In[ ]: total_train = len(list(train_data_dir.glob('*/*.jpg')))", "= tf.data.experimental.AUTOTUNE # In[ ]: import IPython.display as display from", "In[ ]: tf.__version__ # In[ ]: #Train and test data", "history.history['accuracy'] val_acc = history.history['val_accuracy'] loss = history.history['loss'] val_loss = history.history['val_loss']", "model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, 
epochs=total_epochs, initial_epoch = history.epoch[-1], validation_data=val_data_gen,", "Loss') plt.plot(val_loss, label='Validation Loss') plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine", "base_learning_rate = 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy', metrics=['accuracy']) # In[ ]: #fit", "+= history_fine.history['accuracy'] val_acc += history_fine.history['val_accuracy'] loss += history_fine.history['loss'] val_loss +=", "train_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\" test_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\" # In[ ]: train_data_dir", "x = base_model.output x = tf.keras.layers.GlobalAveragePooling2D()(x) x = tf.keras.layers.Dense(num_classes,activation='softmax')(x) model", "val_loss += history_fine.history['val_loss'] # In[ ]: #Plot fine tuning plt.figure(figsize=(8,", "containing images to test img_dir=\"\\\\polyps\" for i,img in enumerate(os.listdir(img_dir)): tmpimage", "total_val = len(list(test_data_dir.glob('*/*.jpg'))) # In[ ]: #get the class names", "numpy as np import matplotlib.pyplot as plt import os #", "right') plt.title('Training and Validation Accuracy') plt.subplot(2, 1, 2) plt.plot(loss, label='Training", "there image_count = len(list(train_data_dir.glob('*/*.jpg'))) image_count # In[ ]: total_train =", "= 8 num_classes = len(CLASS_NAMES) #23 # In[ ]: #We", "= len(list(train_data_dir.glob('*/*.jpg'))) total_val = len(list(test_data_dir.glob('*/*.jpg'))) # In[ ]: #get the", "= history.history['val_loss'] epochs_range = range(epochs) plt.figure(figsize=(8, 8)) plt.subplot(1, 2, 1)", "plt.show() # In[ ]: base_model.trainable = True #now we want", "# In[ ]: #create training plots history acc = history.history['accuracy']", "batch_size ) # In[ ]: #create training plots history acc", "base model print(\"Layers base model: \", len(base_model.layers)) # Fine tune", "model. 
Resnet 50 in this case base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False,", "tensorflow.keras.preprocessing import image #image directory containing images to test img_dir=\"\\\\polyps\"", "datetime import datetime # current date and time. now =", "in base_model.layers[:fine_tune_at]: layer.trainable = False # In[ ]: model.compile(loss='categorical_crossentropy', optimizer", "1, 2) plt.plot(loss, label='Training Loss') plt.plot(val_loss, label='Validation Loss') plt.ylim([0, 1.0])", "= np.ceil(image_count/batch_size) epochs = 8 num_classes = len(CLASS_NAMES) #23 #", "we want to train the base model # In[ ]:", "plt.title('Training and Validation Accuracy') plt.subplot(2, 1, 2) plt.plot(loss, label='Training Loss')", "# In[ ]: #model save and load import os #", "= \"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\" # In[ ]: train_data_dir = pathlib.Path(train_data_dir) test_data_dir =", "tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) # In[ ]: model.summary() # In[ ]: #Fine", "str(timestamp)+'mymodel.h5' model.save(model_filename) # In[ ]: #To apply the model on", "and test data folder train_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\" test_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\"", "ImageDataGenerator() # Generator for our validation data train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir),", "CLASS_NAMES = np.array([item.name for item in train_data_dir.glob('*') if item.name !=", "on new data new_model = tf.keras.models.load_model(model_filename) # Show the model", "plt.title('Training and Validation Accuracy') plt.subplot(1, 2, 2) plt.plot(epochs_range, loss, label='Training", "# Generator for our training data validation_image_generator = ImageDataGenerator() #", "right') plt.title('Training and Validation Loss') plt.show() # In[ ]: base_model.trainable", "load the images and prepare them for the training train_image_generator", "plt.ylim(), 
label='Start Fine Tuning') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy')", "Fine Tuning') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(2, 1,", "tf.__version__ # In[ ]: #Train and test data folder train_data_dir", "= range(epochs) plt.figure(figsize=(8, 8)) plt.subplot(1, 2, 1) plt.plot(epochs_range, acc, label='Training", "model print(\"Layers base model: \", len(base_model.layers)) # Fine tune from", "data folder train_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\" test_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\" # In[", "to load the images and prepare them for the training", "#Fine tune step initial_epochs = 7 fine_tune_epochs = 3 total_epochs", "numpy as np import matplotlib.pyplot as plt # In[ ]:", "steps_per_epoch=total_train // batch_size, epochs=total_epochs, initial_epoch = history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val //", "batch_size, epochs=total_epochs, initial_epoch = history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val // batch_size )", "plt.title('Training and Validation Loss') plt.show() # In[ ]: base_model.trainable =", "# In[ ]: from tensorflow.keras.preprocessing import image #image directory containing", "directory containing images to test img_dir=\"\\\\polyps\" for i,img in enumerate(os.listdir(img_dir)):", "import absolute_import, division, print_function, unicode_literals # In[ ]: #Checking for", "tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout,", "3) # base model from the pre-trained model. 
Resnet 50", "base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet') base_model.trainable = False # In[", "Validation Loss') plt.show() # In[ ]: base_model.trainable = True #now", "print(\"timestamp =\", timestamp) # In[ ]: mode_filename = str(timestamp)+'mymodel.h5' model.save(model_filename)", "for our training data validation_image_generator = ImageDataGenerator() # Generator for", "in train_data_dir.glob('*') if item.name != \"LICENSE.txt\"]) CLASS_NAMES # In[ ]:", "model from the pre-trained model. Resnet 50 in this case", "val_acc, label='Validation Accuracy') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(1,", "]: #Importing all required libraries # In[ ]: from __future__", "ImageDataGenerator() # Generator for our training data validation_image_generator = ImageDataGenerator()", "batch_size print(total_val // batch_size) validation_batches = total_val // batch_size history_fine", "plt.ylim(), label='Start Fine Tuning') plt.legend(loc='upper right') plt.title('Training and Validation Loss')", "= len(list(test_data_dir.glob('*/*.jpg'))) # In[ ]: #get the class names CLASS_NAMES", "#Train and test data folder train_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\" test_data_dir =", "= \"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\" test_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\" # In[ ]: train_data_dir =", "right') plt.title('Training and Validation Loss') plt.xlabel('epoch') plt.show() # In[ ]:", "1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='lower right') plt.title('Training and", "the pre-trained model. 
Resnet 50 in this case base_model =", "Show the model architecture new_model.summary() # In[ ]: from tensorflow.keras.preprocessing", "tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image import", "print(val_data_gen.class_indices.keys()) # In[ ]: IMG_SIZE = 224 IMG_SHAPE = (IMG_SIZE,", "= 3 total_epochs = initial_epochs + fine_tune_epochs train_batches = total_train", "= 32 IMG_HEIGHT = 224 IMG_WIDTH = 224 STEPS_PER_EPOCH =", "// batch_size, epochs=epochs, validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[", "for training batch_size = 32 IMG_HEIGHT = 224 IMG_WIDTH =", "// batch_size history_fine = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=total_epochs,", "IPython.display as display from PIL import Image import numpy as", "as tf_build_info print(tf_build_info.cuda_version_number) # 9.0 in v1.10.0 print(tf_build_info.cudnn_version_number) # 7", "7 fine_tune_epochs = 3 total_epochs = initial_epochs + fine_tune_epochs train_batches", "epochs=epochs, validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]: #create", "validation_steps=total_val // batch_size ) # In[ ]: acc += history_fine.history['accuracy']", "unicode_literals # In[ ]: #Checking for correct cuda and tf", "# In[ ]: base_model.trainable = True #now we want to", "print(total_val // batch_size) validation_batches = total_val // batch_size history_fine =", "fine tuning plt.figure(figsize=(8, 8)) plt.subplot(2, 1, 1) plt.plot(acc, label='Training Accuracy')", "]: #create training plots history acc = history.history['accuracy'] val_acc =", "shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), classes = list(CLASS_NAMES), class_mode='categorical' ) val_data_gen =", "224 IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3) # base model from", "In[ ]: #get the class names CLASS_NAMES = np.array([item.name for", "#now we want to train the base model # In[", "and 
Validation Accuracy') plt.subplot(1, 2, 2) plt.plot(epochs_range, loss, label='Training Loss')", "= tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) # In[ ]: model.summary() # In[ ]:", "= str(timestamp)+'mymodel.h5' model.save(model_filename) # In[ ]: #To apply the model", "the base model print(\"Layers base model: \", len(base_model.layers)) # Fine", "metrics=['accuracy']) # In[ ]: #fit the model history = model.fit_generator(", "+= history_fine.history['loss'] val_loss += history_fine.history['val_loss'] # In[ ]: #Plot fine", "case base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet') base_model.trainable = False #", "len(base_model.layers)) # Fine tune from layer x fine_tune_at = 100", "as np import matplotlib.pyplot as plt import os # In[", "= history.history['accuracy'] val_acc = history.history['val_accuracy'] loss = history.history['loss'] val_loss =", "fine tune starting layer for layer in base_model.layers[:fine_tune_at]: layer.trainable =", "layer x fine_tune_at = 100 # Freeze all the layers", "\"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\" # In[ ]: train_data_dir = pathlib.Path(train_data_dir) test_data_dir = pathlib.Path(test_data_dir)", "= ImageDataGenerator() # Generator for our validation data train_data_gen =", "datetime.timestamp(now) print(\"timestamp =\", timestamp) # In[ ]: mode_filename = str(timestamp)+'mymodel.h5'", "the model history = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=epochs,", "# In[ ]: total_train = len(list(train_data_dir.glob('*/*.jpg'))) total_val = len(list(test_data_dir.glob('*/*.jpg'))) #", "item in train_data_dir.glob('*') if item.name != \"LICENSE.txt\"]) CLASS_NAMES # In[", "tf.keras.layers.GlobalAveragePooling2D()(x) x = tf.keras.layers.Dense(num_classes,activation='softmax')(x) model = tf.keras.models.Model(inputs=base_model.input, outputs=x) base_learning_rate =", "total_val // batch_size 
history_fine = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size,", "plt.subplot(2, 1, 1) plt.plot(acc, label='Training Accuracy') plt.plot(val_acc, label='Validation Accuracy') plt.ylim([0.8,", "]: #count how many images are there image_count = len(list(train_data_dir.glob('*/*.jpg')))", "#23 # In[ ]: #We use image data generators to", "import build_info as tf_build_info print(tf_build_info.cuda_version_number) # 9.0 in v1.10.0 print(tf_build_info.cudnn_version_number)", "required libraries # In[ ]: from __future__ import absolute_import, division,", "initial_epoch = history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[", "now = datetime.now() timestamp = datetime.timestamp(now) print(\"timestamp =\", timestamp) #", "In[ ]: from tensorflow.keras.preprocessing import image #image directory containing images", "to train the base model # In[ ]: # How", "from datetime import datetime # current date and time. now", "Fine tune from layer x fine_tune_at = 100 # Freeze", "8 num_classes = len(CLASS_NAMES) #23 # In[ ]: #We use", "# base model from the pre-trained model. 
Resnet 50 in", "print(tf_build_info.cudnn_version_number) # 7 in v1.10.0 # In[ ]: import tensorflow", "= True #now we want to train the base model", "plt.plot(epochs_range, loss, label='Training Loss') plt.plot(epochs_range, val_loss, label='Validation Loss') plt.legend(loc='upper right')", "plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='upper right') plt.title('Training and Validation", "are there image_count = len(list(train_data_dir.glob('*/*.jpg'))) image_count # In[ ]: total_train", "in v1.10.0 # In[ ]: import tensorflow as tf import", "3 total_epochs = initial_epochs + fine_tune_epochs train_batches = total_train //", "how many images are there image_count = len(list(train_data_dir.glob('*/*.jpg'))) image_count #", "training plots history acc = history.history['accuracy'] val_acc = history.history['val_accuracy'] loss", "# In[ ]: from __future__ import absolute_import, division, print_function, unicode_literals", "history = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=epochs, validation_data=val_data_gen, validation_steps=total_val", "plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(2, 1, 2) plt.plot(loss,", "In[ ]: from __future__ import absolute_import, division, print_function, unicode_literals #", "timestamp = datetime.timestamp(now) print(\"timestamp =\", timestamp) # In[ ]: mode_filename", "model: \", len(base_model.layers)) # Fine tune from layer x fine_tune_at", "outputs=x) base_learning_rate = 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy', metrics=['accuracy']) # In[ ]:", "model.summary() # In[ ]: #Fine tune step initial_epochs = 7", "1, 1) plt.plot(acc, label='Training Accuracy') plt.plot(val_acc, label='Validation Accuracy') plt.ylim([0.8, 1])", "the model on new data new_model = tf.keras.models.load_model(model_filename) # Show", "= 
ImageDataGenerator() # Generator for our training data validation_image_generator =", "plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.show() # In[ ]:", "IMG_WIDTH), classes = list(CLASS_NAMES), class_mode='categorical' ) val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size,", "In[ ]: #count how many images are there image_count =", "224 IMG_WIDTH = 224 STEPS_PER_EPOCH = np.ceil(image_count/batch_size) epochs = 8", "AUTOTUNE = tf.data.experimental.AUTOTUNE # In[ ]: import IPython.display as display", "layer for layer in base_model.layers[:fine_tune_at]: layer.trainable = False # In[", "In[ ]: #some time stamp from datetime import datetime #", "validation_image_generator = ImageDataGenerator() # Generator for our validation data train_data_gen", "model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=epochs, validation_data=val_data_gen, validation_steps=total_val // batch_size", "Accuracy') plt.subplot(2, 1, 2) plt.plot(loss, label='Training Loss') plt.plot(val_loss, label='Validation Loss')", "In[ ]: import tensorflow as tf import pathlib from tensorflow.keras.models", "+ fine_tune_epochs train_batches = total_train // batch_size print(total_val // batch_size)", "# Fine tune from layer x fine_tune_at = 100 #", "epochs=total_epochs, initial_epoch = history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val // batch_size ) #", "class_mode='categorical' ) val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical',", "validation_batches = total_val // batch_size history_fine = model.fit_generator( train_data_gen, steps_per_epoch=total_train", "]: #fit the model history = model.fit_generator( train_data_gen, steps_per_epoch=total_train //", "= pathlib.Path(test_data_dir) # In[ ]: #count how many images 
are", "#Checking for correct cuda and tf versions from tensorflow.python.platform import", "train_batches = total_train // batch_size print(total_val // batch_size) validation_batches =", "data validation_image_generator = ImageDataGenerator() # Generator for our validation data", "2) plt.plot(loss, label='Training Loss') plt.plot(val_loss, label='Validation Loss') plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1],", "history_fine.history['val_accuracy'] loss += history_fine.history['loss'] val_loss += history_fine.history['val_loss'] # In[ ]:", "tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet') base_model.trainable = False # In[ ]: #add", "In[ ]: train_data_dir = pathlib.Path(train_data_dir) test_data_dir = pathlib.Path(test_data_dir) # In[", "enumerate(os.listdir(img_dir)): tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE)) tmpimage = np.expand_dims(tmpimage, axis=0).astype('float32') result_class=new_model.predict(tmpimage)", "utf-8 # In[ ]: #Importing all required libraries # In[", "IMG_WIDTH), class_mode='categorical', classes = list(CLASS_NAMES) ) #get class order from", "8)) plt.subplot(1, 2, 1) plt.plot(epochs_range, acc, label='Training Accuracy') plt.plot(epochs_range, val_acc,", "data train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), classes =", "val_acc += history_fine.history['val_accuracy'] loss += history_fine.history['loss'] val_loss += history_fine.history['val_loss'] #", "layer.trainable = False # In[ ]: model.compile(loss='categorical_crossentropy', optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10),", "division, print_function, unicode_literals # In[ ]: #Checking for correct cuda", "include_top=False, weights='imagenet') base_model.trainable = False # In[ ]: #add new", "label='Training Accuracy') plt.plot(epochs_range, val_acc, 
label='Validation Accuracy') plt.legend(loc='lower right') plt.title('Training and", "base_model.output x = tf.keras.layers.GlobalAveragePooling2D()(x) x = tf.keras.layers.Dense(num_classes,activation='softmax')(x) model = tf.keras.models.Model(inputs=base_model.input,", "label='Validation Loss') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.show() #", "test_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\" # In[ ]: train_data_dir = pathlib.Path(train_data_dir) test_data_dir", "plt.title('Training and Validation Loss') plt.xlabel('epoch') plt.show() # In[ ]: #model", "Generator for our training data validation_image_generator = ImageDataGenerator() # Generator", "]: #get the class names CLASS_NAMES = np.array([item.name for item", "Loss') plt.xlabel('epoch') plt.show() # In[ ]: #model save and load", "from tensorflow.keras.preprocessing.image import ImageDataGenerator import os import numpy as np", "= model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=epochs, validation_data=val_data_gen, validation_steps=total_val //", "list(CLASS_NAMES), class_mode='categorical' ) val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH),", "In[ ]: mode_filename = str(timestamp)+'mymodel.h5' model.save(model_filename) # In[ ]: #To", "= False # In[ ]: #add new classification layer x", "= 224 STEPS_PER_EPOCH = np.ceil(image_count/batch_size) epochs = 8 num_classes =", "= train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), classes = list(CLASS_NAMES), class_mode='categorical'", "import ImageDataGenerator import os import numpy as np import matplotlib.pyplot", "np.array([item.name for item in train_data_dir.glob('*') if item.name != \"LICENSE.txt\"]) CLASS_NAMES", "optimizer = 
tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) # In[ ]: model.summary() # In[", "names CLASS_NAMES = np.array([item.name for item in train_data_dir.glob('*') if item.name", "history.history['val_loss'] epochs_range = range(epochs) plt.figure(figsize=(8, 8)) plt.subplot(1, 2, 1) plt.plot(epochs_range,", "= total_val // batch_size history_fine = model.fit_generator( train_data_gen, steps_per_epoch=total_train //", "new data new_model = tf.keras.models.load_model(model_filename) # Show the model architecture", "acc += history_fine.history['accuracy'] val_acc += history_fine.history['val_accuracy'] loss += history_fine.history['loss'] val_loss", "x = tf.keras.layers.GlobalAveragePooling2D()(x) x = tf.keras.layers.Dense(num_classes,activation='softmax')(x) model = tf.keras.models.Model(inputs=base_model.input, outputs=x)", "history.history['val_accuracy'] loss = history.history['loss'] val_loss = history.history['val_loss'] epochs_range = range(epochs)", "tensorflow.python.platform import build_info as tf_build_info print(tf_build_info.cuda_version_number) # 9.0 in v1.10.0", "= total_train // batch_size print(total_val // batch_size) validation_batches = total_val", "fine_tune_epochs = 3 total_epochs = initial_epochs + fine_tune_epochs train_batches =", "# In[ ]: #some time stamp from datetime import datetime", "Tuning') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.xlabel('epoch') plt.show() #", "len(list(train_data_dir.glob('*/*.jpg'))) image_count # In[ ]: total_train = len(list(train_data_dir.glob('*/*.jpg'))) total_val =", "Loss') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.show() # In[", "Dense, Conv2D, Flatten, Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image import ImageDataGenerator import", "pathlib.Path(train_data_dir) test_data_dir = pathlib.Path(test_data_dir) # In[ ]: #count how many", "= base_model.output x = 
tf.keras.layers.GlobalAveragePooling2D()(x) x = tf.keras.layers.Dense(num_classes,activation='softmax')(x) model =", "as tf import pathlib from tensorflow.keras.models import Sequential from tensorflow.keras.layers", "#some time stamp from datetime import datetime # current date", "Accuracy') plt.subplot(1, 2, 2) plt.plot(epochs_range, loss, label='Training Loss') plt.plot(epochs_range, val_loss,", "#image directory containing images to test img_dir=\"\\\\polyps\" for i,img in", "base model: \", len(base_model.layers)) # Fine tune from layer x", "\", len(base_model.layers)) # Fine tune from layer x fine_tune_at =", "# In[ ]: #Plot fine tuning plt.figure(figsize=(8, 8)) plt.subplot(2, 1,", "import os # In[ ]: tf.__version__ # In[ ]: #Train", "plots history acc = history.history['accuracy'] val_acc = history.history['val_accuracy'] loss =", "history acc = history.history['accuracy'] val_acc = history.history['val_accuracy'] loss = history.history['loss']", "in the base model print(\"Layers base model: \", len(base_model.layers)) #", "fine_tune_epochs train_batches = total_train // batch_size print(total_val // batch_size) validation_batches", "new_model.summary() # In[ ]: from tensorflow.keras.preprocessing import image #image directory", "In[ ]: #Plot fine tuning plt.figure(figsize=(8, 8)) plt.subplot(2, 1, 1)", "label='Validation Accuracy') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(1, 2,", "print(\"Layers base model: \", len(base_model.layers)) # Fine tune from layer", "= False # In[ ]: model.compile(loss='categorical_crossentropy', optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy'])", "model on new data new_model = tf.keras.models.load_model(model_filename) # Show the", "#Define parameter for training batch_size = 32 IMG_HEIGHT = 224", "= initial_epochs + fine_tune_epochs train_batches = total_train // batch_size print(total_val", "In[ ]: #Importing all required libraries # In[ ]: 
from", "build_info as tf_build_info print(tf_build_info.cuda_version_number) # 9.0 in v1.10.0 print(tf_build_info.cudnn_version_number) #", "#We use image data generators to load the images and", "time. now = datetime.now() timestamp = datetime.timestamp(now) print(\"timestamp =\", timestamp)", "= np.array([item.name for item in train_data_dir.glob('*') if item.name != \"LICENSE.txt\"])", "]: #Plot fine tuning plt.figure(figsize=(8, 8)) plt.subplot(2, 1, 1) plt.plot(acc,", "Loss') plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='upper right')", ") # In[ ]: acc += history_fine.history['accuracy'] val_acc += history_fine.history['val_accuracy']", "from the pre-trained model. Resnet 50 in this case base_model", "// batch_size, epochs=total_epochs, initial_epoch = history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val // batch_size", "range(epochs) plt.figure(figsize=(8, 8)) plt.subplot(1, 2, 1) plt.plot(epochs_range, acc, label='Training Accuracy')", "os import numpy as np import matplotlib.pyplot as plt #", "]: AUTOTUNE = tf.data.experimental.AUTOTUNE # In[ ]: import IPython.display as", "32 IMG_HEIGHT = 224 IMG_WIDTH = 224 STEPS_PER_EPOCH = np.ceil(image_count/batch_size)", "= history.history['loss'] val_loss = history.history['val_loss'] epochs_range = range(epochs) plt.figure(figsize=(8, 8))", "# In[ ]: #To apply the model on new data", "Accuracy') plt.plot(val_acc, label='Validation Accuracy') plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine", "model.compile(loss='categorical_crossentropy', optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) # In[ ]: model.summary() #", "class names CLASS_NAMES = np.array([item.name for item in train_data_dir.glob('*') if", "loss='categorical_crossentropy', metrics=['accuracy']) # In[ ]: #fit the model history =", "// batch_size ) # In[ ]: acc += 
history_fine.history['accuracy'] val_acc", "os # In[ ]: tf.__version__ # In[ ]: #Train and", "]: tf.__version__ # In[ ]: #Train and test data folder", "the base model # In[ ]: # How many layers", "= tf.keras.models.load_model(model_filename) # Show the model architecture new_model.summary() # In[", "matplotlib.pyplot as plt # In[ ]: AUTOTUNE = tf.data.experimental.AUTOTUNE #", "import pathlib from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense,", "plt.plot(epochs_range, val_loss, label='Validation Loss') plt.legend(loc='upper right') plt.title('Training and Validation Loss')", "# In[ ]: #add new classification layer x = base_model.output", "use image data generators to load the images and prepare", "# Freeze all the layers before the fine tune starting", "import Dense, Conv2D, Flatten, Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image import ImageDataGenerator", "In[ ]: #To apply the model on new data new_model", "current date and time. 
now = datetime.now() timestamp = datetime.timestamp(now)", "i,img in enumerate(os.listdir(img_dir)): tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE)) tmpimage = np.expand_dims(tmpimage,", "Conv2D, Flatten, Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image import ImageDataGenerator import os", "In[ ]: total_train = len(list(train_data_dir.glob('*/*.jpg'))) total_val = len(list(test_data_dir.glob('*/*.jpg'))) # In[", "test data folder train_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\" test_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\" #", "steps_per_epoch=total_train // batch_size, epochs=epochs, validation_data=val_data_gen, validation_steps=total_val // batch_size ) #", "batch_size) validation_batches = total_val // batch_size history_fine = model.fit_generator( train_data_gen,", "= list(CLASS_NAMES), class_mode='categorical' ) val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT,", "epochs = 8 num_classes = len(CLASS_NAMES) #23 # In[ ]:", "2) plt.plot(epochs_range, loss, label='Training Loss') plt.plot(epochs_range, val_loss, label='Validation Loss') plt.legend(loc='upper", "plt.plot(loss, label='Training Loss') plt.plot(val_loss, label='Validation Loss') plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(),", "// batch_size print(total_val // batch_size) validation_batches = total_val // batch_size", "data new_model = tf.keras.models.load_model(model_filename) # Show the model architecture new_model.summary()", "if item.name != \"LICENSE.txt\"]) CLASS_NAMES # In[ ]: #Define parameter", "#model save and load import os # In[ ]: #some", "order from directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) # In[ ]: IMG_SIZE =", "load import os # In[ ]: #some time stamp from", "# In[ ]: train_data_dir = pathlib.Path(train_data_dir) 
test_data_dir = pathlib.Path(test_data_dir) #", "Image import numpy as np import matplotlib.pyplot as plt import", "= list(CLASS_NAMES) ) #get class order from directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys())", "np import matplotlib.pyplot as plt # In[ ]: AUTOTUNE =", "= tf.keras.layers.GlobalAveragePooling2D()(x) x = tf.keras.layers.Dense(num_classes,activation='softmax')(x) model = tf.keras.models.Model(inputs=base_model.input, outputs=x) base_learning_rate", "label='Training Accuracy') plt.plot(val_acc, label='Validation Accuracy') plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start", "the layers before the fine tune starting layer for layer", "classes = list(CLASS_NAMES) ) #get class order from directories print(train_data_gen.class_indices.keys())", "shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical', classes = list(CLASS_NAMES) ) #get class", "Accuracy') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(1, 2, 2)", ") # In[ ]: #create training plots history acc =", "new_model = tf.keras.models.load_model(model_filename) # Show the model architecture new_model.summary() #", "batch_size, epochs=epochs, validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]:", "are in the base model print(\"Layers base model: \", len(base_model.layers))", "# In[ ]: #Fine tune step initial_epochs = 7 fine_tune_epochs", "total_train // batch_size print(total_val // batch_size) validation_batches = total_val //", "CLASS_NAMES # In[ ]: #Define parameter for training batch_size =", "In[ ]: #Checking for correct cuda and tf versions from", "plt.figure(figsize=(8, 8)) plt.subplot(2, 1, 1) plt.plot(acc, label='Training Accuracy') plt.plot(val_acc, label='Validation", "libraries # In[ ]: from __future__ import absolute_import, division, print_function,", "the images and prepare them for the training 
train_image_generator =", "plt.plot(acc, label='Training Accuracy') plt.plot(val_acc, label='Validation Accuracy') plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(),", "\"LICENSE.txt\"]) CLASS_NAMES # In[ ]: #Define parameter for training batch_size", "image data generators to load the images and prepare them", "for correct cuda and tf versions from tensorflow.python.platform import build_info", "for item in train_data_dir.glob('*') if item.name != \"LICENSE.txt\"]) CLASS_NAMES #", "# In[ ]: #get the class names CLASS_NAMES = np.array([item.name", "batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), classes = list(CLASS_NAMES), class_mode='categorical' ) val_data_gen", "= history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]:", "epochs_range = range(epochs) plt.figure(figsize=(8, 8)) plt.subplot(1, 2, 1) plt.plot(epochs_range, acc,", "starting layer for layer in base_model.layers[:fine_tune_at]: layer.trainable = False #", "as display from PIL import Image import numpy as np", "#get the class names CLASS_NAMES = np.array([item.name for item in", "tune starting layer for layer in base_model.layers[:fine_tune_at]: layer.trainable = False", "224 STEPS_PER_EPOCH = np.ceil(image_count/batch_size) epochs = 8 num_classes = len(CLASS_NAMES)", "MaxPooling2D from tensorflow.keras.preprocessing.image import ImageDataGenerator import os import numpy as", "correct cuda and tf versions from tensorflow.python.platform import build_info as", "print_function, unicode_literals # In[ ]: #Checking for correct cuda and", "model architecture new_model.summary() # In[ ]: from tensorflow.keras.preprocessing import image", "history.history['loss'] val_loss = history.history['val_loss'] epochs_range = range(epochs) plt.figure(figsize=(8, 8)) plt.subplot(1,", "# In[ ]: # How many layers are in the", "display from PIL import Image import numpy as np import", "classes = list(CLASS_NAMES), 
class_mode='categorical' ) val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True,", "time stamp from datetime import datetime # current date and", "apply the model on new data new_model = tf.keras.models.load_model(model_filename) #", "Fine Tuning') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.xlabel('epoch') plt.show()", "train_image_generator = ImageDataGenerator() # Generator for our training data validation_image_generator", "1) plt.plot(epochs_range, acc, label='Training Accuracy') plt.plot(epochs_range, val_acc, label='Validation Accuracy') plt.legend(loc='lower", "label='Validation Loss') plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='upper", "]: from tensorflow.keras.preprocessing import image #image directory containing images to", "model history = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=epochs, validation_data=val_data_gen,", "as np import matplotlib.pyplot as plt # In[ ]: AUTOTUNE", "loss = history.history['loss'] val_loss = history.history['val_loss'] epochs_range = range(epochs) plt.figure(figsize=(8,", "]: #Checking for correct cuda and tf versions from tensorflow.python.platform", "plt import os # In[ ]: tf.__version__ # In[ ]:", "0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy', metrics=['accuracy']) # In[ ]: #fit the model", "absolute_import, division, print_function, unicode_literals # In[ ]: #Checking for correct", "# In[ ]: model.compile(loss='categorical_crossentropy', optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) # In[", "x = tf.keras.layers.Dense(num_classes,activation='softmax')(x) model = tf.keras.models.Model(inputs=base_model.input, outputs=x) base_learning_rate = 0.001", "in enumerate(os.listdir(img_dir)): 
tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE)) tmpimage = np.expand_dims(tmpimage, axis=0).astype('float32')", "In[ ]: acc += history_fine.history['accuracy'] val_acc += history_fine.history['val_accuracy'] loss +=", "= validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical', classes = list(CLASS_NAMES)", "np.ceil(image_count/batch_size) epochs = 8 num_classes = len(CLASS_NAMES) #23 # In[", "#Importing all required libraries # In[ ]: from __future__ import", "tf.keras.models.load_model(model_filename) # Show the model architecture new_model.summary() # In[ ]:", "from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense, Conv2D, Flatten,", "Accuracy') plt.plot(epochs_range, val_acc, label='Validation Accuracy') plt.legend(loc='lower right') plt.title('Training and Validation", "import Image import numpy as np import matplotlib.pyplot as plt", "want to train the base model # In[ ]: #", "parameter for training batch_size = 32 IMG_HEIGHT = 224 IMG_WIDTH", "#!/usr/bin/env python # coding: utf-8 # In[ ]: #Importing all", "label='Start Fine Tuning') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(2,", "PIL import Image import numpy as np import matplotlib.pyplot as", "In[ ]: #Define parameter for training batch_size = 32 IMG_HEIGHT", "In[ ]: model.summary() # In[ ]: #Fine tune step initial_epochs", "\"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\" test_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\" # In[ ]: train_data_dir = pathlib.Path(train_data_dir)", "+= history_fine.history['val_loss'] # In[ ]: #Plot fine tuning plt.figure(figsize=(8, 8))", "#count how many images are there image_count = len(list(train_data_dir.glob('*/*.jpg'))) image_count", "print(tf_build_info.cuda_version_number) # 9.0 in v1.10.0 
print(tf_build_info.cudnn_version_number) # 7 in v1.10.0", "label='Validation Accuracy') plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='lower", "Tuning') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(2, 1, 2)", "import matplotlib.pyplot as plt import os # In[ ]: tf.__version__", "total_epochs = initial_epochs + fine_tune_epochs train_batches = total_train // batch_size", "the class names CLASS_NAMES = np.array([item.name for item in train_data_dir.glob('*')", "validation_steps=total_val // batch_size ) # In[ ]: #create training plots", "target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical', classes = list(CLASS_NAMES) ) #get class order", "layer in base_model.layers[:fine_tune_at]: layer.trainable = False # In[ ]: model.compile(loss='categorical_crossentropy',", "]: import IPython.display as display from PIL import Image import", "// batch_size) validation_batches = total_val // batch_size history_fine = model.fit_generator(", "and prepare them for the training train_image_generator = ImageDataGenerator() #", "plt.xlabel('epoch') plt.show() # In[ ]: #model save and load import", "this case base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet') base_model.trainable = False", "train the base model # In[ ]: # How many", "image #image directory containing images to test img_dir=\"\\\\polyps\" for i,img", "train_data_dir = pathlib.Path(train_data_dir) test_data_dir = pathlib.Path(test_data_dir) # In[ ]: #count", "#To apply the model on new data new_model = tf.keras.models.load_model(model_filename)", "False # In[ ]: #add new classification layer x =", "and load import os # In[ ]: #some time stamp", "pre-trained model. 
Resnet 50 in this case base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE,", "v1.10.0 # In[ ]: import tensorflow as tf import pathlib", "folder train_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\1\" test_data_dir = \"\\\\hyper-kvasir\\\\splits\\\\all\\\\0\" # In[ ]:", "and tf versions from tensorflow.python.platform import build_info as tf_build_info print(tf_build_info.cuda_version_number)", "= pathlib.Path(train_data_dir) test_data_dir = pathlib.Path(test_data_dir) # In[ ]: #count how", "the training train_image_generator = ImageDataGenerator() # Generator for our training", "for the training train_image_generator = ImageDataGenerator() # Generator for our", "import os # In[ ]: #some time stamp from datetime", "Validation Accuracy') plt.subplot(1, 2, 2) plt.plot(epochs_range, loss, label='Training Loss') plt.plot(epochs_range,", "Validation Accuracy') plt.subplot(2, 1, 2) plt.plot(loss, label='Training Loss') plt.plot(val_loss, label='Validation", "classification layer x = base_model.output x = tf.keras.layers.GlobalAveragePooling2D()(x) x =", "Accuracy') plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='lower right')", "#create training plots history acc = history.history['accuracy'] val_acc = history.history['val_accuracy']", "import IPython.display as display from PIL import Image import numpy", "# In[ ]: import IPython.display as display from PIL import", "Freeze all the layers before the fine tune starting layer", "label='Start Fine Tuning') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.xlabel('epoch')", "train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), classes = list(CLASS_NAMES),", "#add new classification layer x = base_model.output x = tf.keras.layers.GlobalAveragePooling2D()(x)", "]: # How many layers are in the base 
model", "plt.plot(val_acc, label='Validation Accuracy') plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning')", "training train_image_generator = ImageDataGenerator() # Generator for our training data", "validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]: #create training", "step initial_epochs = 7 fine_tune_epochs = 3 total_epochs = initial_epochs", "and Validation Loss') plt.show() # In[ ]: base_model.trainable = True", "train_data_gen, steps_per_epoch=total_train // batch_size, epochs=total_epochs, initial_epoch = history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val", "In[ ]: # How many layers are in the base", "In[ ]: #model save and load import os # In[", "]: train_data_dir = pathlib.Path(train_data_dir) test_data_dir = pathlib.Path(test_data_dir) # In[ ]:", "history_fine = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=total_epochs, initial_epoch =", "# How many layers are in the base model print(\"Layers", "and time. 
now = datetime.now() timestamp = datetime.timestamp(now) print(\"timestamp =\",", "loss += history_fine.history['loss'] val_loss += history_fine.history['val_loss'] # In[ ]: #Plot", "# In[ ]: #We use image data generators to load", "]: #Fine tune step initial_epochs = 7 fine_tune_epochs = 3", "len(list(train_data_dir.glob('*/*.jpg'))) total_val = len(list(test_data_dir.glob('*/*.jpg'))) # In[ ]: #get the class", "from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image", "in this case base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet') base_model.trainable =", "import os import numpy as np import matplotlib.pyplot as plt", "Validation Loss') plt.xlabel('epoch') plt.show() # In[ ]: #model save and", "base model # In[ ]: # How many layers are", "many images are there image_count = len(list(train_data_dir.glob('*/*.jpg'))) image_count # In[", ") val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical', classes", "]: #We use image data generators to load the images", "# In[ ]: model.summary() # In[ ]: #Fine tune step", "batch_size ) # In[ ]: acc += history_fine.history['accuracy'] val_acc +=", "history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]: acc", "and Validation Loss') plt.xlabel('epoch') plt.show() # In[ ]: #model save", "datetime.now() timestamp = datetime.timestamp(now) print(\"timestamp =\", timestamp) # In[ ]:", "test img_dir=\"\\\\polyps\" for i,img in enumerate(os.listdir(img_dir)): tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE))", "50 in this case base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet') base_model.trainable", "all the layers before the fine tune starting 
layer for", "v1.10.0 print(tf_build_info.cudnn_version_number) # 7 in v1.10.0 # In[ ]: import", "layer x = base_model.output x = tf.keras.layers.GlobalAveragePooling2D()(x) x = tf.keras.layers.Dense(num_classes,activation='softmax')(x)", "loss, label='Training Loss') plt.plot(epochs_range, val_loss, label='Validation Loss') plt.legend(loc='upper right') plt.title('Training", "plt # In[ ]: AUTOTUNE = tf.data.experimental.AUTOTUNE # In[ ]:", "]: base_model.trainable = True #now we want to train the", "to test img_dir=\"\\\\polyps\" for i,img in enumerate(os.listdir(img_dir)): tmpimage = image.load_img(os.path.join(img_dir,img),", "our validation data train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH),", "tf import pathlib from tensorflow.keras.models import Sequential from tensorflow.keras.layers import", "from layer x fine_tune_at = 100 # Freeze all the", "STEPS_PER_EPOCH = np.ceil(image_count/batch_size) epochs = 8 num_classes = len(CLASS_NAMES) #23", "all required libraries # In[ ]: from __future__ import absolute_import,", "history_fine.history['accuracy'] val_acc += history_fine.history['val_accuracy'] loss += history_fine.history['loss'] val_loss += history_fine.history['val_loss']", "plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='upper right') plt.title('Training", "tune from layer x fine_tune_at = 100 # Freeze all", "layers before the fine tune starting layer for layer in", "=\", timestamp) # In[ ]: mode_filename = str(timestamp)+'mymodel.h5' model.save(model_filename) #", "]: total_train = len(list(train_data_dir.glob('*/*.jpg'))) total_val = len(list(test_data_dir.glob('*/*.jpg'))) # In[ ]:", "9.0 in v1.10.0 print(tf_build_info.cudnn_version_number) # 7 in v1.10.0 # In[", "model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), 
loss='categorical_crossentropy', metrics=['accuracy']) # In[ ]: #fit the model history", "as plt import os # In[ ]: tf.__version__ # In[", "train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), classes = list(CLASS_NAMES), class_mode='categorical' )", "How many layers are in the base model print(\"Layers base", "len(CLASS_NAMES) #23 # In[ ]: #We use image data generators", "= 7 fine_tune_epochs = 3 total_epochs = initial_epochs + fine_tune_epochs", "Flatten, Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image import ImageDataGenerator import os import", "# In[ ]: mode_filename = str(timestamp)+'mymodel.h5' model.save(model_filename) # In[ ]:", "them for the training train_image_generator = ImageDataGenerator() # Generator for", "import matplotlib.pyplot as plt # In[ ]: AUTOTUNE = tf.data.experimental.AUTOTUNE", "label='Training Loss') plt.plot(val_loss, label='Validation Loss') plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start", "in v1.10.0 print(tf_build_info.cudnn_version_number) # 7 in v1.10.0 # In[ ]:", "]: from __future__ import absolute_import, division, print_function, unicode_literals # In[", "image_count # In[ ]: total_train = len(list(train_data_dir.glob('*/*.jpg'))) total_val = len(list(test_data_dir.glob('*/*.jpg')))", "In[ ]: #fit the model history = model.fit_generator( train_data_gen, steps_per_epoch=total_train", "tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE)) tmpimage = np.expand_dims(tmpimage, axis=0).astype('float32') result_class=new_model.predict(tmpimage) print(img,\";\",CLASS_NAMES[result_class.argmax(axis=-1)])", "Sequential from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D from", "In[ ]: #Fine tune step initial_epochs = 7 fine_tune_epochs =", "plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), 
label='Start Fine Tuning') plt.legend(loc='lower right') plt.title('Training", "os # In[ ]: #some time stamp from datetime import", "In[ ]: #We use image data generators to load the", "// batch_size ) # In[ ]: #create training plots history", "IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3) # base model from the", "# coding: utf-8 # In[ ]: #Importing all required libraries", "tf_build_info print(tf_build_info.cuda_version_number) # 9.0 in v1.10.0 print(tf_build_info.cudnn_version_number) # 7 in", "]: mode_filename = str(timestamp)+'mymodel.h5' model.save(model_filename) # In[ ]: #To apply", "many layers are in the base model print(\"Layers base model:", "x fine_tune_at = 100 # Freeze all the layers before", "tune step initial_epochs = 7 fine_tune_epochs = 3 total_epochs =", "stamp from datetime import datetime # current date and time.", "python # coding: utf-8 # In[ ]: #Importing all required", "IMG_WIDTH = 224 STEPS_PER_EPOCH = np.ceil(image_count/batch_size) epochs = 8 num_classes", "training batch_size = 32 IMG_HEIGHT = 224 IMG_WIDTH = 224", "validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]: acc +=", "= datetime.now() timestamp = datetime.timestamp(now) print(\"timestamp =\", timestamp) # In[", "tf.data.experimental.AUTOTUNE # In[ ]: import IPython.display as display from PIL", "In[ ]: #create training plots history acc = history.history['accuracy'] val_acc", "# In[ ]: #Define parameter for training batch_size = 32", "IMG_SIZE = 224 IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3) # base", "plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(1, 2, 2) plt.plot(epochs_range,", "np import matplotlib.pyplot as plt import os # In[ ]:", "= tf.keras.layers.Dense(num_classes,activation='softmax')(x) model = tf.keras.models.Model(inputs=base_model.input, outputs=x) base_learning_rate = 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate),", "plt.subplot(1, 2, 1) plt.plot(epochs_range, acc, 
label='Training Accuracy') plt.plot(epochs_range, val_acc, label='Validation", "import numpy as np import matplotlib.pyplot as plt import os", "len(list(test_data_dir.glob('*/*.jpg'))) # In[ ]: #get the class names CLASS_NAMES =", "model = tf.keras.models.Model(inputs=base_model.input, outputs=x) base_learning_rate = 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy', metrics=['accuracy'])", "1) plt.plot(acc, label='Training Accuracy') plt.plot(val_acc, label='Validation Accuracy') plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1],", "= model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=total_epochs, initial_epoch = history.epoch[-1],", "for i,img in enumerate(os.listdir(img_dir)): tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE)) tmpimage =", "pathlib.Path(test_data_dir) # In[ ]: #count how many images are there", "history_fine.history['val_loss'] # In[ ]: #Plot fine tuning plt.figure(figsize=(8, 8)) plt.subplot(2,", "2, 1) plt.plot(epochs_range, acc, label='Training Accuracy') plt.plot(epochs_range, val_acc, label='Validation Accuracy')", "plt.subplot(1, 2, 2) plt.plot(epochs_range, loss, label='Training Loss') plt.plot(epochs_range, val_loss, label='Validation", "= tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet') base_model.trainable = False # In[ ]:", "tuning plt.figure(figsize=(8, 8)) plt.subplot(2, 1, 1) plt.plot(acc, label='Training Accuracy') plt.plot(val_acc,", "# current date and time. 
now = datetime.now() timestamp =", "7 in v1.10.0 # In[ ]: import tensorflow as tf", "directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) # In[ ]: IMG_SIZE = 224 IMG_SHAPE", "Resnet 50 in this case base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet')", "validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical', classes = list(CLASS_NAMES) )", "# In[ ]: #Importing all required libraries # In[ ]:", "]: model.compile(loss='categorical_crossentropy', optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) # In[ ]: model.summary()", "= tf.keras.models.Model(inputs=base_model.input, outputs=x) base_learning_rate = 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy', metrics=['accuracy']) #", "import Sequential from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D", "train_data_gen, steps_per_epoch=total_train // batch_size, epochs=epochs, validation_data=val_data_gen, validation_steps=total_val // batch_size )", "datetime # current date and time. 
now = datetime.now() timestamp", "train_data_dir.glob('*') if item.name != \"LICENSE.txt\"]) CLASS_NAMES # In[ ]: #Define", "item.name != \"LICENSE.txt\"]) CLASS_NAMES # In[ ]: #Define parameter for", "In[ ]: #add new classification layer x = base_model.output x", "Loss') plt.show() # In[ ]: base_model.trainable = True #now we", "= 100 # Freeze all the layers before the fine", "]: model.summary() # In[ ]: #Fine tune step initial_epochs =", "batch_size history_fine = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=total_epochs, initial_epoch", "plt.show() # In[ ]: #model save and load import os", "from tensorflow.keras.preprocessing import image #image directory containing images to test", "validation data train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), classes", "training data validation_image_generator = ImageDataGenerator() # Generator for our validation", "#Plot fine tuning plt.figure(figsize=(8, 8)) plt.subplot(2, 1, 1) plt.plot(acc, label='Training", ") #get class order from directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) # In[", "history_fine.history['loss'] val_loss += history_fine.history['val_loss'] # In[ ]: #Plot fine tuning", "coding: utf-8 # In[ ]: #Importing all required libraries #", "from PIL import Image import numpy as np import matplotlib.pyplot", "class order from directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) # In[ ]: IMG_SIZE", "images to test img_dir=\"\\\\polyps\" for i,img in enumerate(os.listdir(img_dir)): tmpimage =", "val_loss, label='Validation Loss') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.show()", "before the fine tune starting layer for layer in base_model.layers[:fine_tune_at]:", "Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image import 
ImageDataGenerator import os import numpy", "# In[ ]: acc += history_fine.history['accuracy'] val_acc += history_fine.history['val_accuracy'] loss", "# Show the model architecture new_model.summary() # In[ ]: from", "plt.plot(val_loss, label='Validation Loss') plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning')", "# In[ ]: AUTOTUNE = tf.data.experimental.AUTOTUNE # In[ ]: import", "pathlib from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense, Conv2D,", "IMG_HEIGHT = 224 IMG_WIDTH = 224 STEPS_PER_EPOCH = np.ceil(image_count/batch_size) epochs", "tensorflow as tf import pathlib from tensorflow.keras.models import Sequential from", "plt.plot(epochs_range, acc, label='Training Accuracy') plt.plot(epochs_range, val_acc, label='Validation Accuracy') plt.legend(loc='lower right')", "batch_size = 32 IMG_HEIGHT = 224 IMG_WIDTH = 224 STEPS_PER_EPOCH", "# In[ ]: #Checking for correct cuda and tf versions" ]
[ "{ 'target_name': 'memdump', 'type': 'none', 'dependencies': [ 'memdump/memdump.gyp:memdump', ], },", "license that can be # found in the LICENSE file.", "the LICENSE file. { 'targets': [ # Intermediate target grouping", "native # unittests and instrumentation test apks. { 'target_name': 'android_tools',", "apks. { 'target_name': 'android_tools', 'type': 'none', 'dependencies': [ 'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2',", "# Intermediate target grouping the android tools needed to run", "'memdump', 'type': 'none', 'dependencies': [ 'memdump/memdump.gyp:memdump', ], }, { 'target_name':", "found in the LICENSE file. { 'targets': [ # Intermediate", "run native # unittests and instrumentation test apks. { 'target_name':", "BSD-style license that can be # found in the LICENSE", "{ 'target_name': 'memconsumer', 'type': 'none', 'dependencies': [ 'memconsumer/memconsumer.gyp:memconsumer', ], },", "# Use of this source code is governed by a", "by a BSD-style license that can be # found in", "is governed by a BSD-style license that can be #", "# Copyright (c) 2012 The Chromium Authors. All rights reserved.", "], }, { 'target_name': 'memdump', 'type': 'none', 'dependencies': [ 'memdump/memdump.gyp:memdump',", "of this source code is governed by a BSD-style license", "'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ], }, { 'target_name': 'memdump', 'type': 'none', 'dependencies': [", "{ 'target_name': 'android_tools', 'type': 'none', 'dependencies': [ 'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum',", "}, { 'target_name': 'memconsumer', 'type': 'none', 'dependencies': [ 'memconsumer/memconsumer.gyp:memconsumer', ],", "'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ], }, { 'target_name': 'memdump', 'type': 'none', 'dependencies':", "The Chromium Authors. All rights reserved. # Use of this", "reserved. 
# Use of this source code is governed by", "(c) 2012 The Chromium Authors. All rights reserved. # Use", "2012 The Chromium Authors. All rights reserved. # Use of", "[ # Intermediate target grouping the android tools needed to", "'android_tools', 'type': 'none', 'dependencies': [ 'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ],", "{ 'targets': [ # Intermediate target grouping the android tools", "in the LICENSE file. { 'targets': [ # Intermediate target", "code is governed by a BSD-style license that can be", "grouping the android tools needed to run native # unittests", "a BSD-style license that can be # found in the", "'none', 'dependencies': [ 'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ], }, {", "to run native # unittests and instrumentation test apks. {", "and instrumentation test apks. { 'target_name': 'android_tools', 'type': 'none', 'dependencies':", "LICENSE file. { 'targets': [ # Intermediate target grouping the", "governed by a BSD-style license that can be # found", "the android tools needed to run native # unittests and", "<reponame>SlimKatLegacy/android_external_chromium_org # Copyright (c) 2012 The Chromium Authors. All rights", "Use of this source code is governed by a BSD-style", "tools needed to run native # unittests and instrumentation test", "All rights reserved. # Use of this source code is", "test apks. { 'target_name': 'android_tools', 'type': 'none', 'dependencies': [ 'adb_reboot/adb_reboot.gyp:adb_reboot',", "file. 
{ 'targets': [ # Intermediate target grouping the android", "'target_name': 'memconsumer', 'type': 'none', 'dependencies': [ 'memconsumer/memconsumer.gyp:memconsumer', ], }, ],", "'memdump/memdump.gyp:memdump', ], }, { 'target_name': 'memconsumer', 'type': 'none', 'dependencies': [", "'memconsumer', 'type': 'none', 'dependencies': [ 'memconsumer/memconsumer.gyp:memconsumer', ], }, ], }", "'target_name': 'memdump', 'type': 'none', 'dependencies': [ 'memdump/memdump.gyp:memdump', ], }, {", "Copyright (c) 2012 The Chromium Authors. All rights reserved. #", "needed to run native # unittests and instrumentation test apks.", "# unittests and instrumentation test apks. { 'target_name': 'android_tools', 'type':", "'type': 'none', 'dependencies': [ 'memdump/memdump.gyp:memdump', ], }, { 'target_name': 'memconsumer',", "}, { 'target_name': 'memdump', 'type': 'none', 'dependencies': [ 'memdump/memdump.gyp:memdump', ],", "'dependencies': [ 'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ], }, { 'target_name':", "unittests and instrumentation test apks. { 'target_name': 'android_tools', 'type': 'none',", "instrumentation test apks. { 'target_name': 'android_tools', 'type': 'none', 'dependencies': [", "rights reserved. # Use of this source code is governed", "'dependencies': [ 'memdump/memdump.gyp:memdump', ], }, { 'target_name': 'memconsumer', 'type': 'none',", "[ 'memdump/memdump.gyp:memdump', ], }, { 'target_name': 'memconsumer', 'type': 'none', 'dependencies':", "# found in the LICENSE file. { 'targets': [ #", "Chromium Authors. All rights reserved. 
# Use of this source", "Intermediate target grouping the android tools needed to run native", "'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ], }, { 'target_name': 'memdump', 'type': 'none',", "'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ], }, { 'target_name': 'memdump', 'type':", "'targets': [ # Intermediate target grouping the android tools needed", "that can be # found in the LICENSE file. {", "this source code is governed by a BSD-style license that", "[ 'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ], }, { 'target_name': 'memdump',", "'none', 'dependencies': [ 'memdump/memdump.gyp:memdump', ], }, { 'target_name': 'memconsumer', 'type':", "android tools needed to run native # unittests and instrumentation", "target grouping the android tools needed to run native #", "], }, { 'target_name': 'memconsumer', 'type': 'none', 'dependencies': [ 'memconsumer/memconsumer.gyp:memconsumer',", "source code is governed by a BSD-style license that can", "Authors. All rights reserved. # Use of this source code", "be # found in the LICENSE file. { 'targets': [", "'type': 'none', 'dependencies': [ 'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ], },", "can be # found in the LICENSE file. { 'targets':", "'target_name': 'android_tools', 'type': 'none', 'dependencies': [ 'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem'," ]
[ "a tx containing 2 op_call outputs calling inc() def many_calls_in_same_tx_test(self):", "self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) # Sends a tx containing 2 op_call", "1000000) self.contract_address = contract_data['address'] block_height = self.node.getblockcount() self.node.generate(1) sync_blocks(self.nodes) for", "len(gas_limit) < 20: outputs.append(make_op_call_output(0, b\"\\x04\", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "self.gas_limit_signedness_test() print(\"Checking gas price signedness\") self.gas_price_signedness_test() print(\"Checking gas limit and", "tx_i in range(len(unspents)): if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']: break", "int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) # Deploy the testing", "pay for its potential execution costs in the same way", "self.two_calls_in_same_tx_exceeding_tx_fee_test() print(\"Mining a block with 100 txs each with an", "== counter_should_increase_by) # Deploy the testing contract def create_contract_test(self): \"\"\"", "def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by): # 61bc221a counter() old_out =", "range(200)}) print(\"Creating contract\") self.create_contract_test() print(\"Calling inc() in two outputs\") self.many_calls_in_same_tx_test()", "BitcoinTestFramework from test_framework.util import * from test_framework.script import * from", "100000000) def run_test(self): self.node = self.nodes[0] connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany(\"\",", "containing 1 op_call output where txfee == gas_price*gas_limit. 
def gas_equal_to_tx_fee_test(self):", "gas limit calling inc() def gas_limit_signedness_test(self): outputs = [] gas_limit", "in range(2): # 61bc221a counter() out = int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16)", "outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) #", "CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) # Sends a", "op_call txs def send_100_txs_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000),", "counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) i = 0 unspents", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100) def send_tx_with_value_test(self): outputs =", "i in range(200)}) print(\"Creating contract\") self.create_contract_test() print(\"Calling inc() in two", "= make_vin(self.node, input_value) tx = make_transaction(self.node, [inpt], outputs) if should_throw:", "each with an output calling inc()\") self.send_100_txs_test() print(\"Checking that the", "= int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) i = 0 unspents = self.node.listunspent()", "CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) # Sends a", "developers # Distributed 
under the MIT software license, see the", "def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args =", "counter; function inc() public { counter += 1; } function", "print(\"Creating contract\") self.create_contract_test() print(\"Calling inc() in two outputs\") self.many_calls_in_same_tx_test() print(\"Calling", "import sys class OpCallTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes", "outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) #", "# Distributed under the MIT software license, see the accompanying", "CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends a", "is valid\") self.gas_limit_signedness_one_valid_test() print(\"Checking gas limit signedness\") self.gas_limit_signedness_test() print(\"Checking gas", "# d0e30db0 deposit() outputs.append(make_op_call_output(100000000, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"d0e30db0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "Sends a tx containing 1 op_call output where txfee <", "one tx is valid\") self.gas_limit_signedness_one_valid_test() print(\"Checking gas limit signedness\") self.gas_limit_signedness_test()", "== 100000000) def run_test(self): self.node = self.nodes[0] connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY)", "True self.num_nodes = 2 self.extra_args = [['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self, outputs,", "1+NUM_DEFAULT_DGP_CONTRACTS) # Sends a tx 
containing 2 op_call outputs calling", "b\"\\x04\", b\"\\x01\\x00\", b\"\\xff\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000) # sends", "CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100) def send_tx_with_value_test(self): outputs", "exceeding the tx fee. # This tx should be rejected", "txfee < gas_limit*gas_price\") self.two_calls_in_same_tx_exceeding_tx_fee_test() print(\"Mining a block with 100 txs", "negative gas limit calling inc() def gas_limit_signedness_test(self): outputs = []", "range(2): # 61bc221a counter() out = int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out", "[] # d0e30db0 deposit() outputs.append(make_op_call_output(100000000, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"d0e30db0\"), bytes.fromhex(self.contract_address)))", "# This tx should be rejected since executing such a", "gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000))", "gas limit and gas price signedness\") self.gas_limit_and_price_signedness_test() if __name__ ==", "has a combined gas_price*gas_limit exceeding the tx fee. 
# This", "with 100 txs each with an output calling inc()\") self.send_100_txs_test()", "unspents[tx_i]['vout']), nSequence=0) tx = make_transaction(self.node, [inpt], outputs) txid = self.node.sendrawtransaction(tx)", "if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']: break else: assert(False) inpt", "break else: assert(False) inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0) tx", "b\"\\x04\", b\"\\xff\\xff\", b\"\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff) # Sends", "counter_should_increase_by) # Deploy the testing contract def create_contract_test(self): \"\"\" pragma", "output with txfee < gas_limit*gas_price\") self.two_calls_in_same_tx_exceeding_tx_fee_test() print(\"Mining a block with", "counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0 getBalance() balance = int(self.node.callcontract(self.contract_address, \"12065fe0\")['executionResult']['output'], 16)", "outputs, 100) def send_tx_with_value_test(self): outputs = [] # d0e30db0 deposit()", "self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\", 1000000) self.contract_address = contract_data['address'] block_height = self.node.getblockcount() 
self.node.generate(1) sync_blocks(self.nodes)", "num_txs and len(unspents) > 0: # Select as input a", "gas limit and price calling inc() def gas_limit_and_price_signedness_test(self): outputs =", "+= 1; } function getBalance() public { return this.balance; }", "for i in range(2): assert(self.nodes[i].getblockcount() == block_height+1) assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS)", "b\"\\xff\" # sends a tx containing 1 op_call output with", "print(e) pass else: self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes) for i in range(2):", "execution costs in the same way as a tx with", "16) inpt = make_vin(self.node, input_value) tx = make_transaction(self.node, [inpt], outputs)", "a tx containing 1 op_call output where txfee == gas_price*gas_limit.", "print(\"Calling inc() in one output\") self.normal_op_call_output_test() print(\"Calling inc() in one", "getBalance() public { return this.balance; } } \"\"\" contract_data =", "block_height = self.node.getblockcount() self.node.generate(1) sync_blocks(self.nodes) for i in range(2): assert(self.nodes[i].getblockcount()", "txs def send_100_txs_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE),", "with txfee < gas_limit*gas_price\") self.two_calls_in_same_tx_exceeding_tx_fee_test() print(\"Mining a block with 100", "2 self.extra_args = [['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False):", "txid = self.node.sendrawtransaction(tx) unspents = self.node.listunspent() i += 1 self.node.generate(1)", "getBalance() balance = int(self.node.callcontract(self.contract_address, \"12065fe0\")['executionResult']['output'], 16) assert(balance == 100000000) def", "import BitcoinTestFramework from test_framework.util import * from test_framework.script import *", "i in range(2): # 61bc221a counter() out = 
int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'],", "1 op_call output with a (if interpreted with a signed", "tx fee. # This tx should be rejected since executing", "= [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs,", "print(\"Second test of inc() in one outputs with txfee <", "out = int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self,", "> 0: # Select as input a tx which has", "= self.node.getblockcount() self.node.generate(1) sync_blocks(self.nodes) for i in range(2): assert(self.nodes[i].getblockcount() ==", "self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [['-txindex=1']]*2 def", "op_call output where txfee < gas_price*gas_limit. def gas_exceeding_tx_fee_100001_1_test(self): outputs =", "output calling inc()\") self.send_100_txs_test() print(\"Checking that the value of txs", "int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) i = 0 unspents = self.node.listunspent() while", "in one output with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_1_test() print(\"Second test", "fee. # This tx should be rejected since executing such", "where txfee < gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_1_test(self): outputs = [] outputs.append(make_op_call_output(0,", "\"12065fe0\")['executionResult']['output'], 16) assert(balance == 100000000) def run_test(self): self.node = self.nodes[0]", "= CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0) tx = make_transaction(self.node, [inpt], outputs)", "from test_framework.util import * from test_framework.script import * from test_framework.mininode", "print(\"Calling inc() in one output with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_1_test()", "counter_should_increase_by): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) i", "under the MIT software license, see the accompanying # file", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000) # sends a tx containing 1", "a (if interpreted with a signed integer) negative gas limit", "counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) inpt = make_vin(self.node, input_value)", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000)) gas_limit", "where txfee == gas_price*gas_limit. 
def gas_equal_to_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0,", "outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) #", "inpt = make_vin(self.node, input_value) tx = make_transaction(self.node, [inpt], outputs) if", "counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call output", "rejected since executing such a tx would be unable to", "negative gas limit calling inc() def gas_limit_signedness_one_valid_test(self): outputs = []", "num_txs, outputs, counter_should_increase_by): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'],", "outputs, counter_should_increase_by): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16)", "contract Example { uint public counter; function inc() public {", "self.nodes[0] connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany(\"\", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / Decimal('100000000') for", "print(\"Second test of inc() in one output with txfee <", "print(\"Calling inc() in one output with txfee equal to gas_limit*gas_price\")", "tx containing 2 op_call outputs calling inc() def many_calls_in_same_tx_test(self): outputs", "a tx containing 2 op_call outputs that has a combined", "except JSONRPCException as e: print(e) pass else: self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes)", "gas_equal_to_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), 
bytes.fromhex(self.contract_address)))", "outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) #", "correctly updated\") self.send_tx_with_value_test() print(\"Checking gas limit signedness where one tx", "for i in range(2): # 61bc221a counter() out = int(self.nodes[i].callcontract(self.contract_address,", "which has at least 5 fantasygold spendable for tx_i in", "value of txs are correctly updated\") self.send_tx_with_value_test() print(\"Checking gas limit", "print(\"Checking gas limit signedness\") self.gas_limit_signedness_test() print(\"Checking gas price signedness\") self.gas_price_signedness_test()", "= b\"\\xff\" outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\\x00\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\",", "while i < num_txs and len(unspents) > 0: # Select", "= [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\x7f\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1,", "counter_should_increase_by=0, input_value=500000000, should_throw=False): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'],", "self.extra_args = [['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False): #", "for tx_i in range(len(unspents)): if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']:", "output where txfee < gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_1_test(self): outputs = []", "inc() def many_calls_in_same_tx_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE),", "= [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", b\"\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True,", "inc() public { counter += 1; } function getBalance() public", "1000000000)) gas_limit += b\"\\xff\" # sends a tx containing 1", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) # Sends a tx containing 2", "import * from test_framework.fantasygoldconfig import * import sys class OpCallTest(BitcoinTestFramework):", "import * from test_framework.fantasygold import * from test_framework.fantasygoldconfig import *", "tx would be unable to pay for its potential execution", "pass else: self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes) for i in range(2): #", "# file COPYING or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework import BitcoinTestFramework from", "12065fe0 getBalance() balance = int(self.node.callcontract(self.contract_address, \"12065fe0\")['executionResult']['output'], 16) assert(balance == 100000000)", "self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends a normal raw op_call tx", "op_call tx with a single output. 
def normal_op_call_output_test(self): outputs =", "interpreted with a signed integer) negative gas limit calling inc()", "[] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000),", "with one output where txfee < gas_price*gas_limit. def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs", "# Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed", "1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany(\"\", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / Decimal('100000000') for i in", "def gas_exceeding_tx_fee_100001_1_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "e: print(e) pass else: self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes) for i in", "where txfee < gas_price*gas_limit. def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0,", "bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE)", "one output with txfee < gas_limit*gas_price\") self.two_calls_in_same_tx_exceeding_tx_fee_test() print(\"Mining a block", "test_framework.fantasygold import * from test_framework.fantasygoldconfig import * import sys class", "and len(unspents) > 0: # Select as input a tx", "file COPYING or http://www.opensource.org/licenses/mit-license.php. 
from test_framework.test_framework import BitcoinTestFramework from test_framework.util", "# Sends a tx containing 2 op_call outputs calling inc()", "input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call output where", "b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends", "import * import sys class OpCallTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain =", "def normal_op_call_output_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\x7f\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "[inpt], outputs) txid = self.node.sendrawtransaction(tx) unspents = self.node.listunspent() i +=", "JSONRPCException as e: print(e) pass else: self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes) for", "self.many_calls_in_same_tx_test() print(\"Calling inc() in one output\") self.normal_op_call_output_test() print(\"Calling inc() in", "self.contract_address = contract_data['address'] block_height = self.node.getblockcount() self.node.generate(1) sync_blocks(self.nodes) for i", "< gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_1_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(10000001),", "counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends a normal raw op_call tx with", "Sends 100 valid op_call txs def send_100_txs_test(self): outputs = []", "fantasygold spendable for tx_i in range(len(unspents)): if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE", "calling inc() def gas_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\x01\\x00\",", "should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends a tx containing 1 op_call output", "* import sys class OpCallTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True", "an output calling inc()\") self.send_100_txs_test() print(\"Checking that the value of", "= self.nodes[0] connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany(\"\", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / Decimal('100000000')", "def gas_equal_to_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_1_test() print(\"Second test of inc() in one", "range(2): assert(self.nodes[i].getblockcount() == block_height+1) assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS) # Sends a", "else: self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes) for i in range(2): # 61bc221a", "b\"\\x01\\x00\", b\"\\xff\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000) # sends a", "input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) # Sends a tx containing 1 op_call output", "CScriptNum(1000000), 
CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) # Sends a", "should_throw: try: self.node.sendrawtransaction(tx) assert(False) except JSONRPCException as e: print(e) pass", "b\"\\x04\", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) # Sends", "gas_limit*gas_price\") self.gas_equal_to_tx_fee_test() print(\"Calling inc() in one output with txfee <", "outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0,", "# Sends a tx containing 2 op_call outputs that has", "should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000)) gas_limit += b\"\\xff\" # sends", "from test_framework.script import * from test_framework.mininode import * from test_framework.fantasygold", "tx containing 1 op_call output where txfee < gas_price*gas_limit. 
def", "[] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True)", "100) def send_tx_with_value_test(self): outputs = [] # d0e30db0 deposit() outputs.append(make_op_call_output(100000000,", "b\"\\xff\\xff\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends a", "same way as a tx with one output where txfee", "calling inc() def many_calls_in_same_tx_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000),", "def create_contract_test(self): \"\"\" pragma solidity ^0.4.10; contract Example { uint", "block with 100 txs each with an output calling inc()\")", "tx = make_transaction(self.node, [inpt], outputs) if should_throw: try: self.node.sendrawtransaction(tx) assert(False)", "counter += 1; } function getBalance() public { return this.balance;", "should_throw=True, input_value=0xff*0xffff) # Sends 100 valid op_call txs def send_100_txs_test(self):", "self.node.sendmany(\"\", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / Decimal('100000000') for i in range(200)}) print(\"Creating", "def gas_limit_and_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", b\"\\xff\", bytes.fromhex(\"371303c0\"),", "output with a (if interpreted with a signed integer) negative", "b\"\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff) # Sends 100 valid", "* from test_framework.fantasygoldconfig import * import sys class OpCallTest(BitcoinTestFramework): def", "= b\"\\xff\" 
while len(gas_limit) < 20: outputs.append(make_op_call_output(0, b\"\\x04\", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE),", "output with txfee equal to gas_limit*gas_price\") self.gas_equal_to_tx_fee_test() print(\"Calling inc() in", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends a tx containing", "limit signedness\") self.gas_limit_signedness_test() print(\"Checking gas price signedness\") self.gas_price_signedness_test() print(\"Checking gas", "input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) # sends a tx containing 1 op_call output", "gas_limit_and_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", b\"\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "= [] gas_limit = b\"\\xff\" outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\\x00\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "gas_limit*gas_price\") self.two_calls_in_same_tx_exceeding_tx_fee_test() print(\"Mining a block with 100 txs each with", "input a tx which has at least 5 fantasygold spendable", "its potential execution costs in the same way as a", "containing 1 op_call output where txfee < gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_1_test(self):", "input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000)) gas_limit += b\"\\xff\" # sends a", "outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", b\"\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff) #", "gas_exceeding_tx_fee_100001_2_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "in one outputs with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_2_test() print(\"Second test", "len(unspents) > 0: # Select as input a tx which", "self.gas_exceeding_tx_fee_100001_2_test() print(\"Second test of inc() in one output with txfee", "create_contract_test(self): \"\"\" pragma solidity ^0.4.10; contract Example { uint public", "limit calling inc() def gas_limit_signedness_test(self): outputs = [] gas_limit =", "gas price signedness\") self.gas_price_signedness_test() print(\"Checking gas limit and gas price", "license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from", "to gas_limit*gas_price\") self.gas_equal_to_tx_fee_test() print(\"Calling inc() in one output with txfee", "many_calls_in_same_tx_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "one output with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_1_test() print(\"Second test of", "gas_price*gas_limit exceeding the tx fee. # This tx should be", "or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework import BitcoinTestFramework from test_framework.util import *", "a single output. 
def normal_op_call_output_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\",", "a tx containing 1 op_call output where txfee < gas_price*gas_limit.", "as input a tx which has at least 5 fantasygold", "gas_price*gas_limit. def gas_exceeding_tx_fee_100001_2_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE),", "tx containing 1 op_call output with a (if interpreted with", "+= b\"\\xff\" # sends a tx containing 1 op_call output", "outputs = [] # d0e30db0 deposit() outputs.append(make_op_call_output(100000000, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE),", "sys class OpCallTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes =", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) # Sends a tx containing 1", "Sends a tx containing 2 op_call outputs calling inc() def", "for its potential execution costs in the same way as", "output. def normal_op_call_output_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\x7f\", CScriptNum(FGC_MIN_GAS_PRICE),", "61bc221a counter() out = int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by)", "sends a tx containing 1 op_call output with a (if", "test_framework.test_framework import BitcoinTestFramework from test_framework.util import * from test_framework.script import", "5 fantasygold spendable for tx_i in range(len(unspents)): if int(unspents[tx_i]['amount']*COIN) ==", "outputs) txid = self.node.sendrawtransaction(tx) unspents = self.node.listunspent() i += 1", "1 op_call output where txfee == gas_price*gas_limit. 
def gas_equal_to_tx_fee_test(self): outputs", "[] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\x01\\x00\", b\"\\xff\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000)", "would be unable to pay for its potential execution costs", "2 op_call outputs calling inc() def many_calls_in_same_tx_test(self): outputs = []", "outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "class OpCallTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2", "normal_op_call_output_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\x7f\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False): # 61bc221a counter() old_out =", "txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_2_test() print(\"Second test of inc() in one", "= [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\",", "= self.node.listunspent() i += 1 self.node.generate(1) sync_blocks(self.nodes) for i in", "b\"\\x04\", b\"\\xff\\xff\\x00\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", 
CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2,", "# Select as input a tx which has at least", "gas limit calling inc() def gas_limit_signedness_one_valid_test(self): outputs = [] gas_limit", "be rejected since executing such a tx would be unable", "with a possible negative gas limit and price calling inc()", "integer) negative gas price calling inc() def gas_price_signedness_test(self): outputs =", "2 op_call outputs that has a combined gas_price*gas_limit exceeding the", "combined gas_price*gas_limit exceeding the tx fee. # This tx should", "import * from test_framework.mininode import * from test_framework.fantasygold import *", "limit and gas price signedness\") self.gas_limit_and_price_signedness_test() if __name__ == '__main__':", "output where txfee < gas_price*gas_limit. def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs = []", "= [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE,", "assert(False) inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0) tx = make_transaction(self.node,", "public { counter += 1; } function getBalance() public {", "b\"\\x04\", b\"\\xff\\x7f\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends", "tx which has at least 5 fantasygold spendable for tx_i", "return this.balance; } } \"\"\" contract_data = 
self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\", 1000000) self.contract_address", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) # Sends a tx", "outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address,", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends a normal raw op_call", "int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']: break else: assert(False) inpt =", "= [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1,", "a normal raw op_call tx with a single output. 
def", "i = 0 unspents = self.node.listunspent() while i < num_txs", "in range(2): assert(self.nodes[i].getblockcount() == block_height+1) assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS) # Sends", "inc() in one output with txfee equal to gas_limit*gas_price\") self.gas_equal_to_tx_fee_test()", "import * from test_framework.script import * from test_framework.mininode import *", "= [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\x01\\x00\", b\"\\xff\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True,", "\"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) # Deploy the testing contract", "gas_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\x01\\x00\", b\"\\xff\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "calling inc()\") self.send_100_txs_test() print(\"Checking that the value of txs are", "input_value) tx = make_transaction(self.node, [inpt], outputs) if should_throw: try: self.node.sendrawtransaction(tx)", "= int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) inpt = make_vin(self.node, input_value) tx =", "solidity ^0.4.10; contract Example { uint public counter; function inc()", "one outputs with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_2_test() print(\"Second test of", "print(\"Checking that the value of txs are correctly updated\") self.send_tx_with_value_test()", "limit calling inc() def gas_limit_signedness_one_valid_test(self): outputs = [] gas_limit =", "tx containing 1 op_call output where txfee == gas_price*gas_limit. 
def", "outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) #", "b\"\\x04\", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000),", "the testing contract def create_contract_test(self): \"\"\" pragma solidity ^0.4.10; contract", "= [] # d0e30db0 deposit() outputs.append(make_op_call_output(100000000, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"d0e30db0\"),", "b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) # Sends", "Deploy the testing contract def create_contract_test(self): \"\"\" pragma solidity ^0.4.10;", "b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"d0e30db0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0", "containing 2 op_call outputs that has a combined gas_price*gas_limit exceeding", "self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000) # sends a tx containing 1 op_call", "tx is valid\") self.gas_limit_signedness_one_valid_test() print(\"Checking gas limit signedness\") self.gas_limit_signedness_test() print(\"Checking", "} function getBalance() public { return this.balance; } } \"\"\"", "#!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers", 
"{self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / Decimal('100000000') for i in range(200)}) print(\"Creating contract\")", "= True self.num_nodes = 2 self.extra_args = [['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self,", "Sends a tx containing 2 op_call outputs that has a", "in the same way as a tx with one output", "outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100,", "that the value of txs are correctly updated\") self.send_tx_with_value_test() print(\"Checking", "nSequence=0) tx = make_transaction(self.node, [inpt], outputs) txid = self.node.sendrawtransaction(tx) unspents", "valid\") self.gas_limit_signedness_one_valid_test() print(\"Checking gas limit signedness\") self.gas_limit_signedness_test() print(\"Checking gas price", "in one output\") self.normal_op_call_output_test() print(\"Calling inc() in one output with", "a signed integer) negative gas price calling inc() def gas_price_signedness_test(self):", "equal to gas_limit*gas_price\") self.gas_equal_to_tx_fee_test() print(\"Calling inc() in one output with", "inc() def gas_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\x01\\x00\", b\"\\xff\\xff\",", "normal raw op_call tx with a single output. 
def normal_op_call_output_test(self):", "inc() in one output with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_1_test() print(\"Second", "self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000)) gas_limit += b\"\\xff\" #", "input_value=500000000, should_throw=False): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16)", "gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_2_test() print(\"Second test of inc() in one output with", "outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\x7f\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "{ uint public counter; function inc() public { counter +=", "and unspents[tx_i]['spendable']: break else: assert(False) inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']),", "tx containing 2 op_call outputs that has a combined gas_price*gas_limit", "a (if interpreted with a signed integer) negative gas price", "send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address,", "test of inc() in one output with txfee < gas_limit*gas_price\")", "op_call output with a (if interpreted with a signed integer)", "= self.node.sendrawtransaction(tx) unspents = self.node.listunspent() i += 1 self.node.generate(1) sync_blocks(self.nodes)", "are correctly updated\") self.send_tx_with_value_test() print(\"Checking gas limit signedness where one", "txfee == gas_price*gas_limit. 
def gas_equal_to_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\",", "bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100) def send_tx_with_value_test(self): outputs = [] #", "< gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_1_test() print(\"Second test of inc() in one outputs", "= [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1,", "def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False): # 61bc221a counter() old_out", "counter() out = int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) def", "\"61bc221a\")['executionResult']['output'], 16) i = 0 unspents = self.node.listunspent() while i", "1 op_call output where txfee < gas_price*gas_limit. def gas_exceeding_tx_fee_100001_1_test(self): outputs", "b\"\\xff\\xff\\x00\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "= make_transaction(self.node, [inpt], outputs) txid = self.node.sendrawtransaction(tx) unspents = self.node.listunspent()", "txs are correctly updated\") self.send_tx_with_value_test() print(\"Checking gas limit signedness where", "== 1+NUM_DEFAULT_DGP_CONTRACTS) # Sends a tx containing 2 op_call outputs", "that has a combined gas_price*gas_limit exceeding the tx fee. 
#", "self.node.sendrawtransaction(tx) unspents = self.node.listunspent() i += 1 self.node.generate(1) sync_blocks(self.nodes) for", "self.node.listunspent() while i < num_txs and len(unspents) > 0: #", "inc() in one outputs with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_2_test() print(\"Second", "Bitcoin Core developers # Distributed under the MIT software license,", "self.gas_price_signedness_test() print(\"Checking gas limit and gas price signedness\") self.gas_limit_and_price_signedness_test() if", "from test_framework.fantasygold import * from test_framework.fantasygoldconfig import * import sys", "outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE),", "valid op_call txs def send_100_txs_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\",", "< gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_2_test() print(\"Second test of inc() in one output", "op_call outputs that has a combined gas_price*gas_limit exceeding the tx", "b\"\\x04\", b\"\\xff\\xff\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends", "b\"\\xff\\xff\", b\"\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff) # Sends 100", "gas limit signedness where one tx is valid\") self.gas_limit_signedness_one_valid_test() print(\"Checking", "run_test(self): self.node = self.nodes[0] connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany(\"\", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE", 
"bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE)", "def send_100_txs_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0) tx = make_transaction(self.node, [inpt],", "assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS) # Sends a tx containing 2 op_call", "with an output calling inc()\") self.send_100_txs_test() print(\"Checking that the value", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) # Sends a tx containing", "with txfee equal to gas_limit*gas_price\") self.gas_equal_to_tx_fee_test() print(\"Calling inc() in one", "print(\"Checking gas price signedness\") self.gas_price_signedness_test() print(\"Checking gas limit and gas", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "uint public counter; function inc() public { counter += 1;", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends a normal", "print(\"Checking gas limit and gas price signedness\") 
self.gas_limit_and_price_signedness_test() if __name__", "a tx containing 1 op_call output with a (if interpreted", "This tx should be rejected since executing such a tx", "test_framework.util import * from test_framework.script import * from test_framework.mininode import", "print(\"Checking gas limit signedness where one tx is valid\") self.gas_limit_signedness_one_valid_test()", "connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany(\"\", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / Decimal('100000000') for i", "in one output with txfee < gas_limit*gas_price\") self.two_calls_in_same_tx_exceeding_tx_fee_test() print(\"Mining a", "b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100) def send_tx_with_value_test(self):", "accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
from test_framework.test_framework import BitcoinTestFramework", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "Distributed under the MIT software license, see the accompanying #", "# 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) i =", "sync_blocks(self.nodes) for i in range(2): assert(self.nodes[i].getblockcount() == block_height+1) assert(len(self.nodes[i].listcontracts()) ==", "the same way as a tx with one output where", "price calling inc() def gas_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\",", "def send_tx_with_value_test(self): outputs = [] # d0e30db0 deposit() outputs.append(make_op_call_output(100000000, b\"\\x04\",", "unspents[tx_i]['spendable']: break else: assert(False) inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0)", "16), unspents[tx_i]['vout']), nSequence=0) tx = make_transaction(self.node, [inpt], outputs) txid =", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff) # Sends 100 valid op_call txs", "input_value=1000000*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call output where", "try: self.node.sendrawtransaction(tx) assert(False) except JSONRPCException as e: print(e) pass else:", "assert(self.nodes[i].getblockcount() == block_height+1) assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS) # Sends a tx", "the value of txs are correctly updated\") self.send_tx_with_value_test() print(\"Checking gas", "and price calling inc() def gas_limit_and_price_signedness_test(self): outputs = [] 
outputs.append(make_op_call_output(0,", "one output where txfee < gas_price*gas_limit. def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs =", "self.gas_exceeding_tx_fee_100001_1_test() print(\"Second test of inc() in one outputs with txfee", "= make_transaction(self.node, [inpt], outputs) if should_throw: try: self.node.sendrawtransaction(tx) assert(False) except", "else: assert(False) inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0) tx =", "Decimal('100000000') for i in range(200)}) print(\"Creating contract\") self.create_contract_test() print(\"Calling inc()", "for i in range(200)}) print(\"Creating contract\") self.create_contract_test() print(\"Calling inc() in", "block_height+1) assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS) # Sends a tx containing 2", "counter() out = int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) #", "while len(gas_limit) < 20: outputs.append(make_op_call_output(0, b\"\\x04\", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) # Sends a tx containing 1", "{ counter += 1; } function getBalance() public { return", "should_throw=False): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) inpt", "[] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100)", "a block with 100 txs each with an output calling", "CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), 
bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) # sends a", "self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call", "gas_limit += b\"\\xff\" # sends a tx containing 1 op_call", "self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0 getBalance() balance = int(self.node.callcontract(self.contract_address, \"12065fe0\")['executionResult']['output'],", "== counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by): # 61bc221a counter()", "assert(balance == 100000000) def run_test(self): self.node = self.nodes[0] connect_nodes(self.nodes[0], 1)", "output with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_1_test() print(\"Second test of inc()", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100) def send_tx_with_value_test(self): outputs = []", "self.node.generate(1) sync_blocks(self.nodes) for i in range(2): assert(self.nodes[i].getblockcount() == block_height+1) assert(len(self.nodes[i].listcontracts())", "of inc() in one output with txfee < gas_limit*gas_price\") self.two_calls_in_same_tx_exceeding_tx_fee_test()", "0: # Select as input a tx which has at", "public counter; function inc() public { counter += 1; }", "assert(out-old_out == counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by): # 61bc221a", "100 valid op_call txs def send_100_txs_test(self): outputs = [] outputs.append(make_op_call_output(0,", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) 
self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends a tx", "Core developers # Distributed under the MIT software license, see", "print(\"Mining a block with 100 txs each with an output", "def gas_limit_signedness_test(self): outputs = [] gas_limit = b\"\\xff\" while len(gas_limit)", "self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) # Sends a tx containing 1 op_call", "(if interpreted with a signed integer) negative gas limit calling", "see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework", "unspents = self.node.listunspent() while i < num_txs and len(unspents) >", "function getBalance() public { return this.balance; } } \"\"\" contract_data", "= contract_data['address'] block_height = self.node.getblockcount() self.node.generate(1) sync_blocks(self.nodes) for i in", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) # Sends a tx containing", "inc() def gas_limit_signedness_test(self): outputs = [] gas_limit = b\"\\xff\" while", "price signedness\") self.gas_price_signedness_test() print(\"Checking gas limit and gas price signedness\")", "such a tx would be unable to pay for its", "self.node.getblockcount() self.node.generate(1) sync_blocks(self.nodes) for i in range(2): assert(self.nodes[i].getblockcount() == block_height+1)", "self.create_contract_test() print(\"Calling inc() in two outputs\") self.many_calls_in_same_tx_test() print(\"Calling inc() in", "self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany(\"\", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / Decimal('100000000') for i in range(200)})", "16) assert(out-old_out == counter_should_increase_by) # Deploy the testing contract def", "this.balance; } } 
\"\"\" contract_data = self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\", 1000000) self.contract_address =", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends a tx", "1 op_call output where txfee < gas_price*gas_limit. def gas_exceeding_tx_fee_100001_2_test(self): outputs", "input_value=10000000) # sends a tx containing 1 op_call output with", "a tx containing 1 op_call output with a possible negative", "txs each with an output calling inc()\") self.send_100_txs_test() print(\"Checking that", "output where txfee < gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_2_test(self): outputs = []", "calling inc() def gas_limit_signedness_one_valid_test(self): outputs = [] gas_limit = b\"\\xff\"", "outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) #", "b\"\\xff\\x7f\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends a", "calling inc() def gas_limit_and_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\",", "b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) # sends", "\"\"\" pragma solidity ^0.4.10; contract Example { uint public counter;", "self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100) def send_tx_with_value_test(self): outputs = [] # d0e30db0", "inc() in one output\") self.normal_op_call_output_test() print(\"Calling inc() in one output", "inc() in two outputs\") self.many_calls_in_same_tx_test() print(\"Calling inc() in one output\")", "txfee < gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_2_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\",", "outputs) if should_throw: try: self.node.sendrawtransaction(tx) assert(False) except JSONRPCException as e:", "self.gas_equal_to_tx_fee_test() print(\"Calling inc() in one output with txfee < gas_limit*gas_price\")", "int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs,", "make_transaction(self.node, [inpt], outputs) txid = self.node.sendrawtransaction(tx) unspents = self.node.listunspent() i", "counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call output", "= [] gas_limit = b\"\\xff\" while len(gas_limit) < 20: outputs.append(make_op_call_output(0,", "gas_price*gas_limit. def gas_equal_to_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE),", "^0.4.10; contract Example { uint public counter; function inc() public", "should_throw=True, input_value=10000000) # sends a tx containing 1 op_call output", "# sends a tx containing 1 op_call output with a", "[] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True)", "outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\x01\\x00\", b\"\\xff\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000) #", "make_vin(self.node, input_value) tx = make_transaction(self.node, [inpt], outputs) if should_throw: try:", "has at least 5 fantasygold spendable for tx_i in range(len(unspents)):", 
"def many_calls_in_same_tx_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "potential execution costs in the same way as a tx", "outputs that has a combined gas_price*gas_limit exceeding the tx fee.", "from test_framework.mininode import * from test_framework.fantasygold import * from test_framework.fantasygoldconfig", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) # Sends a tx", "as e: print(e) pass else: self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes) for i", "1 self.node.generate(1) sync_blocks(self.nodes) for i in range(2): # 61bc221a counter()", "< 20: outputs.append(make_op_call_output(0, b\"\\x04\", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True,", "single output. 
def normal_op_call_output_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\x7f\",", "input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0 getBalance() balance = int(self.node.callcontract(self.contract_address, \"12065fe0\")['executionResult']['output'], 16) assert(balance", "signedness\") self.gas_limit_signedness_test() print(\"Checking gas price signedness\") self.gas_price_signedness_test() print(\"Checking gas limit", "gas limit signedness\") self.gas_limit_signedness_test() print(\"Checking gas price signedness\") self.gas_price_signedness_test() print(\"Checking", "outputs calling inc() def many_calls_in_same_tx_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\",", "gas_limit = b\"\\xff\" outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\\x00\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0,", "self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends a tx containing 1 op_call", "sends a tx containing 1 op_call output with a possible", "[] gas_limit = b\"\\xff\" while len(gas_limit) < 20: outputs.append(make_op_call_output(0, b\"\\x04\",", "gas_limit_signedness_one_valid_test(self): outputs = [] gas_limit = b\"\\xff\" outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\\x00\",", "software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php.", "sync_blocks(self.nodes) for i in range(2): # 61bc221a counter() out =", "[] gas_limit = b\"\\xff\" outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\\x00\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "= 2 self.extra_args = [['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000,", 
"\"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by):", "<reponame>FantasyGold/FantasyGold-Core #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core", "def gas_limit_signedness_one_valid_test(self): outputs = [] gas_limit = b\"\\xff\" outputs.append(make_op_call_output(0, b\"\\x04\",", "one output with txfee equal to gas_limit*gas_price\") self.gas_equal_to_tx_fee_test() print(\"Calling inc()", "tx containing 1 op_call output with a possible negative gas", "testing contract def create_contract_test(self): \"\"\" pragma solidity ^0.4.10; contract Example", "contract_data = self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\", 1000000) self.contract_address = contract_data['address'] block_height = self.node.getblockcount()", "txfee < gas_price*gas_limit. 
def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\",", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True,", "a possible negative gas limit and price calling inc() def", "outputs with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_2_test() print(\"Second test of inc()", "input_value=0xff*0xffff) # Sends 100 valid op_call txs def send_100_txs_test(self): outputs", "} } \"\"\" contract_data = self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\", 1000000) self.contract_address = contract_data['address']", "if should_throw: try: self.node.sendrawtransaction(tx) assert(False) except JSONRPCException as e: print(e)", "bytes.fromhex(\"d0e30db0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0 getBalance() balance =", "== gas_price*gas_limit. 
def gas_equal_to_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000),", "# 12065fe0 getBalance() balance = int(self.node.callcontract(self.contract_address, \"12065fe0\")['executionResult']['output'], 16) assert(balance ==", "assert(False) except JSONRPCException as e: print(e) pass else: self.node.sendrawtransaction(tx) self.node.generate(1)", "16) i = 0 unspents = self.node.listunspent() while i <", "a combined gas_price*gas_limit exceeding the tx fee. # This tx", "signed integer) negative gas limit calling inc() def gas_limit_signedness_test(self): outputs", "with a signed integer) negative gas price calling inc() def", "with a signed integer) negative gas limit calling inc() def", "gas price calling inc() def gas_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0,", "b\"\\xff\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000) # sends a tx", "Sends a normal raw op_call tx with a single output.", "COPYING or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework import BitcoinTestFramework from test_framework.util import", "op_call output where txfee == gas_price*gas_limit. def gas_equal_to_tx_fee_test(self): outputs =", "\"61bc221a\")['executionResult']['output'], 16) inpt = make_vin(self.node, input_value) tx = make_transaction(self.node, [inpt],", "the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
from test_framework.test_framework import", "self.send_tx_with_value_test() print(\"Checking gas limit signedness where one tx is valid\")", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends a normal raw", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends a tx containing", "(c) 2015-2016 The Bitcoin Core developers # Distributed under the", "public { return this.balance; } } \"\"\" contract_data = self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\",", "self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call", "op_call output where txfee < gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_2_test(self): outputs =", "self.send_100_txs_test() print(\"Checking that the value of txs are correctly updated\")", "def gas_exceeding_tx_fee_100001_2_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "make_transaction(self.node, [inpt], outputs) if should_throw: try: self.node.sendrawtransaction(tx) assert(False) except JSONRPCException", "* from test_framework.fantasygold import * from test_framework.fantasygoldconfig import * import", "two outputs\") self.many_calls_in_same_tx_test() print(\"Calling inc() in one output\") self.normal_op_call_output_test() print(\"Calling", "Example { uint public counter; function inc() public { counter", "should be rejected since executing such a tx would be", "tx with one output where txfee < gas_price*gas_limit. def two_calls_in_same_tx_exceeding_tx_fee_test(self):", "1; } function getBalance() public { return this.balance; } }", "< gas_price*gas_limit. 
def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000),", "a tx which has at least 5 fantasygold spendable for", "output with a possible negative gas limit and price calling", "negative gas price calling inc() def gas_price_signedness_test(self): outputs = []", "outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\\x00\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", CScriptNum(FGC_MIN_GAS_PRICE),", "negative gas limit and price calling inc() def gas_limit_and_price_signedness_test(self): outputs", "outputs.append(make_op_call_output(100000000, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"d0e30db0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) #", "= 0 unspents = self.node.listunspent() while i < num_txs and", "integer) negative gas limit calling inc() def gas_limit_signedness_one_valid_test(self): outputs =", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) # sends a tx", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) # sends a tx containing 1", "= 
self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\", 1000000) self.contract_address = contract_data['address'] block_height = self.node.getblockcount() self.node.generate(1)", "== 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']: break else: assert(False) inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'],", "gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_1_test() print(\"Second test of inc() in one outputs with", "inc()\") self.send_100_txs_test() print(\"Checking that the value of txs are correctly", "should_throw=True) # sends a tx containing 1 op_call output with", "gas_limit = b\"\\xff\" while len(gas_limit) < 20: outputs.append(make_op_call_output(0, b\"\\x04\", gas_limit,", "where one tx is valid\") self.gas_limit_signedness_one_valid_test() print(\"Checking gas limit signedness\")", "test_framework.mininode import * from test_framework.fantasygold import * from test_framework.fantasygoldconfig import", "limit signedness where one tx is valid\") self.gas_limit_signedness_one_valid_test() print(\"Checking gas", "= self.node.listunspent() while i < num_txs and len(unspents) > 0:", "int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) inpt = make_vin(self.node, input_value) tx = make_transaction(self.node,", "b\"\\xff\" outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\\x00\", 
CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\",", "in two outputs\") self.many_calls_in_same_tx_test() print(\"Calling inc() in one output\") self.normal_op_call_output_test()", "one output\") self.normal_op_call_output_test() print(\"Calling inc() in one output with txfee", "# Sends a tx containing 1 op_call output where txfee", "costs in the same way as a tx with one", "signed integer) negative gas price calling inc() def gas_price_signedness_test(self): outputs", "test of inc() in one outputs with txfee < gas_limit*gas_price\")", "OpCallTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args", "# Sends 100 valid op_call txs def send_100_txs_test(self): outputs =", "the MIT software license, see the accompanying # file COPYING", "set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [['-txindex=1']]*2", "16) assert(balance == 100000000) def run_test(self): self.node = self.nodes[0] connect_nodes(self.nodes[0],", "outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_1_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE),", "should_throw=True) # Sends a tx containing 2 op_call outputs that", "self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff) # Sends 100 valid op_call txs def", "self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) # sends a tx containing 1 op_call", "outputs\") self.many_calls_in_same_tx_test() print(\"Calling inc() in one output\") self.normal_op_call_output_test() print(\"Calling inc()", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff) # Sends 100 valid op_call", "unspents = self.node.listunspent() i += 1 self.node.generate(1) sync_blocks(self.nodes) for i", "d0e30db0 deposit() outputs.append(make_op_call_output(100000000, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"d0e30db0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0,", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) # Sends a tx", "(if interpreted with a signed integer) negative gas price calling", "self.gas_limit_signedness_one_valid_test() print(\"Checking gas limit signedness\") self.gas_limit_signedness_test() print(\"Checking gas price signedness\")", "op_call outputs calling inc() def many_calls_in_same_tx_test(self): outputs = [] outputs.append(make_op_call_output(0,", "output where txfee == gas_price*gas_limit. 
def gas_equal_to_tx_fee_test(self): outputs = []", "updated\") self.send_tx_with_value_test() print(\"Checking gas limit signedness where one tx is", "with a single output. def normal_op_call_output_test(self): outputs = [] outputs.append(make_op_call_output(0,", "[inpt], outputs) if should_throw: try: self.node.sendrawtransaction(tx) assert(False) except JSONRPCException as", "outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", b\"\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"d0e30db0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0 getBalance()", "balance = int(self.node.callcontract(self.contract_address, \"12065fe0\")['executionResult']['output'], 16) assert(balance == 100000000) def run_test(self):", "inc() def gas_limit_and_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", b\"\\xff\",", "# Sends a normal raw op_call tx with a single", "txfee < gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_1_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\",", "send_100_txs_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "of txs are correctly updated\") self.send_tx_with_value_test() print(\"Checking gas limit signedness", "possible negative gas limit and price calling inc() def gas_limit_and_price_signedness_test(self):", "b\"\\x04\", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) # Sends", "integer) negative gas limit calling inc() def gas_limit_signedness_test(self): outputs =", "deposit() outputs.append(make_op_call_output(100000000, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"d0e30db0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE)", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000) # sends a tx containing", "0 unspents = self.node.listunspent() while i < num_txs and len(unspents)", "Select as input a tx which has at least 5", "gas_limit_signedness_test(self): outputs = [] gas_limit = b\"\\xff\" while len(gas_limit) <", "i in range(2): assert(self.nodes[i].getblockcount() == block_height+1) assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS) #", "as a tx with one output where txfee < gas_price*gas_limit.", "txfee equal to gas_limit*gas_price\") self.gas_equal_to_tx_fee_test() print(\"Calling inc() in one output", "i < num_txs and len(unspents) > 0: # Select as", 
"input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends a tx containing 1 op_call output with", "of inc() in one outputs with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_2_test()", "op_call output with a possible negative gas limit and price", "100 txs each with an output calling inc()\") self.send_100_txs_test() print(\"Checking", "10000000), 1000000000)) gas_limit += b\"\\xff\" # sends a tx containing", "outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\x01\\x00\", b\"\\xff\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "1000000*FGC_MIN_GAS_PRICE / Decimal('100000000') for i in range(200)}) print(\"Creating contract\") self.create_contract_test()", "python3 # Copyright (c) 2015-2016 The Bitcoin Core developers #", "int(self.node.callcontract(self.contract_address, \"12065fe0\")['executionResult']['output'], 16) assert(balance == 100000000) def run_test(self): self.node =", "# 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) inpt =", "at least 5 fantasygold spendable for tx_i in range(len(unspents)): if", "def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "gas_exceeding_tx_fee_100001_1_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs,", "bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), 
CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True)", "16) assert(out-old_out == counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by): #", "self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes) for i in range(2): # 61bc221a counter()", "limit and price calling inc() def gas_limit_and_price_signedness_test(self): outputs = []", "where txfee < gas_price*gas_limit. def gas_exceeding_tx_fee_100001_2_test(self): outputs = [] outputs.append(make_op_call_output(0,", "/ Decimal('100000000') for i in range(200)}) print(\"Creating contract\") self.create_contract_test() print(\"Calling", "self.normal_op_call_output_test() print(\"Calling inc() in one output with txfee equal to", "= int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self, num_txs,", "i += 1 self.node.generate(1) sync_blocks(self.nodes) for i in range(2): #", "two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "[] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\", b\"\\xff\", bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff)", "{ return this.balance; } } \"\"\" contract_data = 
self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\", 1000000)", "gas_price*gas_limit. def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE),", "send_tx_with_value_test(self): outputs = [] # d0e30db0 deposit() outputs.append(make_op_call_output(100000000, b\"\\x04\", CScriptNum(1000000),", "outputs = [] gas_limit = b\"\\xff\" outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\xff\\x00\", CScriptNum(FGC_MIN_GAS_PRICE),", "self.node = self.nodes[0] connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany(\"\", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE /", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000)) gas_limit += b\"\\xff\"", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000)) gas_limit +=", "from test_framework.fantasygoldconfig import * import sys class OpCallTest(BitcoinTestFramework): def set_test_params(self):", "* from test_framework.script import 
* from test_framework.mininode import * from", "20: outputs.append(make_op_call_output(0, b\"\\x04\", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit),", "< gas_limit*gas_price\") self.two_calls_in_same_tx_exceeding_tx_fee_test() print(\"Mining a block with 100 txs each", "MIT software license, see the accompanying # file COPYING or", "+= 1 self.node.generate(1) sync_blocks(self.nodes) for i in range(2): # 61bc221a", "spendable for tx_i in range(len(unspents)): if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and", "input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends a normal raw op_call tx with a", "assert(out-old_out == counter_should_increase_by) # Deploy the testing contract def create_contract_test(self):", "outputs.append(make_op_call_output(0, b\"\\x04\", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE,", "\"\"\" contract_data = self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\", 1000000) self.contract_address = contract_data['address'] block_height =", "== 
block_height+1) assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS) # Sends a tx containing", "self.node.sendrawtransaction(tx) assert(False) except JSONRPCException as e: print(e) pass else: self.node.sendrawtransaction(tx)", "[] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE)", "def run_test(self): self.node = self.nodes[0] connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany(\"\", {self.node.getnewaddress():", "to pay for its potential execution costs in the same", "price calling inc() def gas_limit_and_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\",", "} \"\"\" contract_data = self.node.createcontract(\"6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029\", 1000000) self.contract_address = contract_data['address'] block_height", "self.node.generate(1) sync_blocks(self.nodes) for i in range(2): # 61bc221a counter() out", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) # Sends a tx containing", "old_out = 
int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) i = 0 unspents =", "executing such a tx would be unable to pay for", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends a tx containing 1", "def gas_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\x01\\x00\", b\"\\xff\\xff\", bytes.fromhex(\"371303c0\"),", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0 getBalance() balance = int(self.node.callcontract(self.contract_address,", "output\") self.normal_op_call_output_test() print(\"Calling inc() in one output with txfee equal", "* from test_framework.mininode import * from test_framework.fantasygold import * from", "least 5 fantasygold spendable for tx_i in range(len(unspents)): if int(unspents[tx_i]['amount']*COIN)", "since executing such a tx would be unable to pay", "with a (if interpreted with a signed integer) negative gas", "Sends a tx containing 1 op_call output where txfee ==", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1,", "unable to pay for its potential execution costs in the", "in one output with txfee equal to gas_limit*gas_price\") self.gas_equal_to_tx_fee_test() print(\"Calling", "[['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False): # 61bc221a counter()", "a tx with one output where txfee < gas_price*gas_limit. 
def", "function inc() public { counter += 1; } function getBalance()", "way as a tx with one output where txfee <", "contract_data['address'] block_height = self.node.getblockcount() self.node.generate(1) sync_blocks(self.nodes) for i in range(2):", "self.num_nodes = 2 self.extra_args = [['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0,", "inc() def gas_limit_signedness_one_valid_test(self): outputs = [] gas_limit = b\"\\xff\" outputs.append(make_op_call_output(0,", "2015-2016 The Bitcoin Core developers # Distributed under the MIT", "< gas_price*gas_limit. def gas_exceeding_tx_fee_100001_2_test(self): outputs = [] outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000001),", "= int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) # Deploy the", "outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\x7f\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) #", "http://www.opensource.org/licenses/mit-license.php. 
from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * from", "outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100) def", "signed integer) negative gas limit calling inc() def gas_limit_signedness_one_valid_test(self): outputs", "interpreted with a signed integer) negative gas price calling inc()", "signedness\") self.gas_price_signedness_test() print(\"Checking gas limit and gas price signedness\") self.gas_limit_and_price_signedness_test()", "# Deploy the testing contract def create_contract_test(self): \"\"\" pragma solidity", "should_throw=True) # Sends a tx containing 1 op_call output where", "in range(200)}) print(\"Creating contract\") self.create_contract_test() print(\"Calling inc() in two outputs\")", "CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0) tx = make_transaction(self.node, [inpt], outputs) txid", "test_framework.script import * from test_framework.mininode import * from test_framework.fantasygold import", "CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address)))", "with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_1_test() print(\"Second test of inc() in", "bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends a tx containing 1", "tx should be rejected since executing such a tx would", "containing 1 op_call output with a possible negative gas limit", "inc() in one output with txfee < gas_limit*gas_price\") 
self.two_calls_in_same_tx_exceeding_tx_fee_test() print(\"Mining", "# 61bc221a counter() out = int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out ==", "b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b\"\\x04\", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"),", "containing 2 op_call outputs calling inc() def many_calls_in_same_tx_test(self): outputs =", "raw op_call tx with a single output. def normal_op_call_output_test(self): outputs", "containing 1 op_call output with a (if interpreted with a", "old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) inpt = make_vin(self.node, input_value) tx", "bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) # sends a tx containing", "61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) inpt = make_vin(self.node,", "pragma solidity ^0.4.10; contract Example { uint public counter; function", "= [['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False): # 61bc221a", "counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by): # 61bc221a counter() old_out", "in range(len(unspents)): if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']: break else:", "with txfee < gas_limit*gas_price\") self.gas_exceeding_tx_fee_100001_2_test() print(\"Second test of inc() in", "and gas price signedness\") self.gas_limit_and_price_signedness_test() if __name__ == 
'__main__': OpCallTest().main()", "tx = make_transaction(self.node, [inpt], outputs) txid = self.node.sendrawtransaction(tx) unspents =", "tx with a single output. def normal_op_call_output_test(self): outputs = []", "< num_txs and len(unspents) > 0: # Select as input", "range(len(unspents)): if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']: break else: assert(False)", "= int(self.node.callcontract(self.contract_address, \"12065fe0\")['executionResult']['output'], 16) assert(balance == 100000000) def run_test(self): self.node", "print(\"Calling inc() in two outputs\") self.many_calls_in_same_tx_test() print(\"Calling inc() in one", "contract\") self.create_contract_test() print(\"Calling inc() in two outputs\") self.many_calls_in_same_tx_test() print(\"Calling inc()", "input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) # Sends a tx containing 2 op_call outputs", "test_framework.fantasygoldconfig import * import sys class OpCallTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain", "16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000)) gas_limit += b\"\\xff\" # sends a tx", "the tx fee. 
# This tx should be rejected since", "a signed integer) negative gas limit calling inc() def gas_limit_signedness_one_valid_test(self):", "outputs = [] gas_limit = b\"\\xff\" while len(gas_limit) < 20:", "The Bitcoin Core developers # Distributed under the MIT software", "from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * from test_framework.script", "out = int(self.nodes[i].callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) # Deploy", "1 op_call output with a possible negative gas limit and", "61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, \"61bc221a\")['executionResult']['output'], 16) i = 0", "a tx would be unable to pay for its potential", "signedness where one tx is valid\") self.gas_limit_signedness_one_valid_test() print(\"Checking gas limit", "contract def create_contract_test(self): \"\"\" pragma solidity ^0.4.10; contract Example {", "CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"d0e30db0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0 getBalance() balance", "1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']: break else: assert(False) inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16),", "Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed under", "[] outputs.append(make_op_call_output(0, b\"\\x04\", b\"\\xff\\x7f\", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE)", "containing 1 op_call output where txfee < gas_price*gas_limit. 
def gas_exceeding_tx_fee_100001_2_test(self):", "a signed integer) negative gas limit calling inc() def gas_limit_signedness_test(self):", "self.node.listunspent() i += 1 self.node.generate(1) sync_blocks(self.nodes) for i in range(2):", "calling inc() def gas_limit_signedness_test(self): outputs = [] gas_limit = b\"\\xff\"", "be unable to pay for its potential execution costs in", "b\"\\xff\" while len(gas_limit) < 20: outputs.append(make_op_call_output(0, b\"\\x04\", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex(\"371303c0\")," ]
[ "_load(self): with fsspec.open(self._urlpath, mode='rb', **self._storage_options) as f: return f.read() def", "import DataSource, Schema import joblib import fsspec import sklearn import", "Schema import joblib import fsspec import sklearn import re from", "- ``s3://some-bucket/models/model.pkl`` \"\"\" self._urlpath = urlpath self._storage_options = storage_options or", "if s: sklearn_version = s.group(2).decode() else: sklearn_version = None self._schema", "else: sklearn_version = None self._schema = Schema( npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version", "was created with Scikit-Learn version {} ' 'but version {}", "partition_access = False def __init__(self, urlpath, storage_options=None, metadata=None): \"\"\" Parameters", "with fsspec.open(self._urlpath, mode='rb', **self._storage_options) as f: return f.read() def _get_schema(self):", "pkl file Either the absolute or relative path to the", "opened. Some examples: - ``{{ CATALOG_DIR }}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl`` \"\"\"", "def _load(self): with fsspec.open(self._urlpath, mode='rb', **self._storage_options) as f: return f.read()", "s: sklearn_version = s.group(2).decode() else: sklearn_version = None self._schema =", "self._schema def read(self): self._load_metadata() if not self.metadata['sklearn_version'] == sklearn.__version__: msg", "storage_options or {} super().__init__(metadata=metadata) def _load(self): with fsspec.open(self._urlpath, mode='rb', **self._storage_options)", "{} super().__init__(metadata=metadata) def _load(self): with fsspec.open(self._urlpath, mode='rb', **self._storage_options) as f:", "---------- urlpath: str, location of model pkl file Either the", "or relative path to the file or URL to be", "= storage_options or {} super().__init__(metadata=metadata) def _load(self): with fsspec.open(self._urlpath, mode='rb',", "the file or URL to be opened. 
Some examples: -", "Some examples: - ``{{ CATALOG_DIR }}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl`` \"\"\" self._urlpath", "sklearn_version = s.group(2).decode() else: sklearn_version = None self._schema = Schema(", "name = 'sklearn' version = __version__ partition_access = False def", "version = __version__ partition_access = False def __init__(self, urlpath, storage_options=None,", "import fsspec import sklearn import re from . import __version__", "to the file or URL to be opened. Some examples:", "to be opened. Some examples: - ``{{ CATALOG_DIR }}/models/model.pkl`` -", "urlpath: str, location of model pkl file Either the absolute", "relative path to the file or URL to be opened.", "has been installed in your current environment.' ).format(self.metadata['sklearn_version'], sklearn.__version__) raise", "``s3://some-bucket/models/model.pkl`` \"\"\" self._urlpath = urlpath self._storage_options = storage_options or {}", "sklearn.__version__: msg = ('The model was created with Scikit-Learn version", "= 'python' name = 'sklearn' version = __version__ partition_access =", "of model pkl file Either the absolute or relative path", "= Schema( npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version } ) return self._schema def", "container = 'python' name = 'sklearn' version = __version__ partition_access", "absolute or relative path to the file or URL to", "DataSource, Schema import joblib import fsspec import sklearn import re", "version {} has been installed in your current environment.' 
).format(self.metadata['sklearn_version'],", "str, location of model pkl file Either the absolute or", "s = re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q', as_binary) if s: sklearn_version = s.group(2).decode() else:", "def __init__(self, urlpath, storage_options=None, metadata=None): \"\"\" Parameters ---------- urlpath: str,", "fsspec.open(self._urlpath, mode='rb', **self._storage_options) as f: return f.read() def _get_schema(self): as_binary", "= False def __init__(self, urlpath, storage_options=None, metadata=None): \"\"\" Parameters ----------", "Scikit-Learn version {} ' 'but version {} has been installed", "examples: - ``{{ CATALOG_DIR }}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl`` \"\"\" self._urlpath =", "} ) return self._schema def read(self): self._load_metadata() if not self.metadata['sklearn_version']", "= urlpath self._storage_options = storage_options or {} super().__init__(metadata=metadata) def _load(self):", "{} ' 'but version {} has been installed in your", "environment.' 
).format(self.metadata['sklearn_version'], sklearn.__version__) raise RuntimeError(msg) with fsspec.open(self._urlpath, **self._storage_options) as f:", "_get_schema(self): as_binary = self._load() s = re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q', as_binary) if s:", "= ('The model was created with Scikit-Learn version {} '", "metadata=None): \"\"\" Parameters ---------- urlpath: str, location of model pkl", "- ``{{ CATALOG_DIR }}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl`` \"\"\" self._urlpath = urlpath", "urlpath, storage_options=None, metadata=None): \"\"\" Parameters ---------- urlpath: str, location of", "False def __init__(self, urlpath, storage_options=None, metadata=None): \"\"\" Parameters ---------- urlpath:", "super().__init__(metadata=metadata) def _load(self): with fsspec.open(self._urlpath, mode='rb', **self._storage_options) as f: return", "model pkl file Either the absolute or relative path to", "return self._schema def read(self): self._load_metadata() if not self.metadata['sklearn_version'] == sklearn.__version__:", "sklearn.__version__) raise RuntimeError(msg) with fsspec.open(self._urlpath, **self._storage_options) as f: return joblib.load(f)", ").format(self.metadata['sklearn_version'], sklearn.__version__) raise RuntimeError(msg) with fsspec.open(self._urlpath, **self._storage_options) as f: return", "location of model pkl file Either the absolute or relative", "\"\"\" Parameters ---------- urlpath: str, location of model pkl file", "= __version__ partition_access = False def __init__(self, urlpath, storage_options=None, metadata=None):", "import re from . 
import __version__ class SklearnModelSource(DataSource): container =", "' 'but version {} has been installed in your current", "read(self): self._load_metadata() if not self.metadata['sklearn_version'] == sklearn.__version__: msg = ('The", "Parameters ---------- urlpath: str, location of model pkl file Either", "self._load_metadata() if not self.metadata['sklearn_version'] == sklearn.__version__: msg = ('The model", "= 'sklearn' version = __version__ partition_access = False def __init__(self,", "SklearnModelSource(DataSource): container = 'python' name = 'sklearn' version = __version__", "= self._load() s = re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q', as_binary) if s: sklearn_version =", "with Scikit-Learn version {} ' 'but version {} has been", "self.metadata['sklearn_version'] == sklearn.__version__: msg = ('The model was created with", "from . import __version__ class SklearnModelSource(DataSource): container = 'python' name", "'python' name = 'sklearn' version = __version__ partition_access = False", "= re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q', as_binary) if s: sklearn_version = s.group(2).decode() else: sklearn_version", "from intake.source.base import DataSource, Schema import joblib import fsspec import", "npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version } ) return self._schema def read(self): self._load_metadata()", "**self._storage_options) as f: return f.read() def _get_schema(self): as_binary = self._load()", "re from . 
import __version__ class SklearnModelSource(DataSource): container = 'python'", "__version__ partition_access = False def __init__(self, urlpath, storage_options=None, metadata=None): \"\"\"", "extra_metadata={ 'sklearn_version':sklearn_version } ) return self._schema def read(self): self._load_metadata() if", "created with Scikit-Learn version {} ' 'but version {} has", "def _get_schema(self): as_binary = self._load() s = re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q', as_binary) if", "('The model was created with Scikit-Learn version {} ' 'but", "None self._schema = Schema( npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version } ) return", "version {} ' 'but version {} has been installed in", "{} has been installed in your current environment.' ).format(self.metadata['sklearn_version'], sklearn.__version__)", "``{{ CATALOG_DIR }}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl`` \"\"\" self._urlpath = urlpath self._storage_options", "self._load() s = re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q', as_binary) if s: sklearn_version = s.group(2).decode()", "__version__ class SklearnModelSource(DataSource): container = 'python' name = 'sklearn' version", "or {} super().__init__(metadata=metadata) def _load(self): with fsspec.open(self._urlpath, mode='rb', **self._storage_options) as", "re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q', as_binary) if s: sklearn_version = s.group(2).decode() else: sklearn_version =", "installed in your current environment.' 
).format(self.metadata['sklearn_version'], sklearn.__version__) raise RuntimeError(msg) with", "\"\"\" self._urlpath = urlpath self._storage_options = storage_options or {} super().__init__(metadata=metadata)", "intake.source.base import DataSource, Schema import joblib import fsspec import sklearn", "model was created with Scikit-Learn version {} ' 'but version", "self._storage_options = storage_options or {} super().__init__(metadata=metadata) def _load(self): with fsspec.open(self._urlpath,", "= None self._schema = Schema( npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version } )", ") return self._schema def read(self): self._load_metadata() if not self.metadata['sklearn_version'] ==", "'but version {} has been installed in your current environment.'", "self._schema = Schema( npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version } ) return self._schema", "current environment.' ).format(self.metadata['sklearn_version'], sklearn.__version__) raise RuntimeError(msg) with fsspec.open(self._urlpath, **self._storage_options) as", "Either the absolute or relative path to the file or", "or URL to be opened. Some examples: - ``{{ CATALOG_DIR", "storage_options=None, metadata=None): \"\"\" Parameters ---------- urlpath: str, location of model", "as_binary = self._load() s = re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q', as_binary) if s: sklearn_version", "def read(self): self._load_metadata() if not self.metadata['sklearn_version'] == sklearn.__version__: msg =", "sklearn_version = None self._schema = Schema( npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version }", "if not self.metadata['sklearn_version'] == sklearn.__version__: msg = ('The model was", "been installed in your current environment.' 
).format(self.metadata['sklearn_version'], sklearn.__version__) raise RuntimeError(msg)", "file Either the absolute or relative path to the file", "the absolute or relative path to the file or URL", "fsspec import sklearn import re from . import __version__ class", "not self.metadata['sklearn_version'] == sklearn.__version__: msg = ('The model was created", "joblib import fsspec import sklearn import re from . import", "urlpath self._storage_options = storage_options or {} super().__init__(metadata=metadata) def _load(self): with", "as f: return f.read() def _get_schema(self): as_binary = self._load() s", "'sklearn_version':sklearn_version } ) return self._schema def read(self): self._load_metadata() if not", "URL to be opened. Some examples: - ``{{ CATALOG_DIR }}/models/model.pkl``", "Schema( npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version } ) return self._schema def read(self):", "be opened. Some examples: - ``{{ CATALOG_DIR }}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl``", "self._urlpath = urlpath self._storage_options = storage_options or {} super().__init__(metadata=metadata) def", "f.read() def _get_schema(self): as_binary = self._load() s = re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q', as_binary)", "'sklearn' version = __version__ partition_access = False def __init__(self, urlpath,", "path to the file or URL to be opened. Some", "<reponame>AlbertDeFusco/intake-sklearn<gh_stars>1-10 from intake.source.base import DataSource, Schema import joblib import fsspec", "as_binary) if s: sklearn_version = s.group(2).decode() else: sklearn_version = None", "sklearn import re from . 
import __version__ class SklearnModelSource(DataSource): container", "}}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl`` \"\"\" self._urlpath = urlpath self._storage_options = storage_options", "msg = ('The model was created with Scikit-Learn version {}", "== sklearn.__version__: msg = ('The model was created with Scikit-Learn", "__init__(self, urlpath, storage_options=None, metadata=None): \"\"\" Parameters ---------- urlpath: str, location", "file or URL to be opened. Some examples: - ``{{", "mode='rb', **self._storage_options) as f: return f.read() def _get_schema(self): as_binary =", "return f.read() def _get_schema(self): as_binary = self._load() s = re.search(b'_sklearn_versionq(.*\\x00)((\\d+\\.)?(\\d+\\.)?(\\*|\\d+))q',", "= s.group(2).decode() else: sklearn_version = None self._schema = Schema( npartitions=1,", "CATALOG_DIR }}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl`` \"\"\" self._urlpath = urlpath self._storage_options =", "your current environment.' ).format(self.metadata['sklearn_version'], sklearn.__version__) raise RuntimeError(msg) with fsspec.open(self._urlpath, **self._storage_options)", "import __version__ class SklearnModelSource(DataSource): container = 'python' name = 'sklearn'", "import joblib import fsspec import sklearn import re from .", "f: return f.read() def _get_schema(self): as_binary = self._load() s =", "class SklearnModelSource(DataSource): container = 'python' name = 'sklearn' version =", "s.group(2).decode() else: sklearn_version = None self._schema = Schema( npartitions=1, extra_metadata={", "import sklearn import re from . import __version__ class SklearnModelSource(DataSource):", "in your current environment.' ).format(self.metadata['sklearn_version'], sklearn.__version__) raise RuntimeError(msg) with fsspec.open(self._urlpath,", ". import __version__ class SklearnModelSource(DataSource): container = 'python' name =" ]
class ParamListener(object):
    """
    Collects the params that a function is being called with.

    An instance gets attached to a function's ``listeners`` set; whenever
    that function is executed, ``execute`` records the passed params so
    they can be inspected afterwards.
    """
    def __init__(self):
        # Accumulates every param seen across all recorded executions.
        self.param_possibilities = []

    def execute(self, params):
        # ``+=`` on a list is an in-place extend; spell it out explicitly.
        self.param_possibilities.extend(params)
@debug.increase_indent
def search_params(evaluator, param):
    """
    A dynamic search for param values. If you try to complete a type:

    >>> def func(foo):
    ...     foo

    >>> func(1)
    >>> func("")

    It is not known what the type of ``foo`` is without analysing the whole
    code. You have to look for all calls to ``func`` to find out what
    ``foo`` possibly is.
    """
    if not settings.dynamic_params:
        return []

    debug.dbg('Dynamic param search for %s', param)
    function = param.get_parent_until(pr.Function)

    # Keep only the call-site names whose value matches this param's name.
    matching_names = []
    for name in search_function_call(evaluator, function):
        if name.value == param.name.value:
            matching_names.append(name)

    # Evaluate the ExecutedParams to types.
    result = []
    for name in matching_names:
        result.extend(name.parent.eval(evaluator))

    debug.dbg('Dynamic param result %s', result)
    return result
@memoize_default([], evaluator_is_first_arg=True)
def search_function_call(evaluator, func):
    """
    Returns a list of param names.

    Searches the modules that mention ``func``'s name for call sites,
    executes those calls with a temporary ``ParamListener`` attached to
    ``func``, and returns the params the listener recorded. For
    ``__init__`` the search is done under the enclosing class's name,
    since that is what callers actually invoke.
    """
    # NOTE(review): local import — presumably avoids a circular import at
    # module load time; confirm before hoisting to the top of the file.
    from jedi.evaluate import representation as er

    def get_params_for_module(module):
        """
        Returns the values of a param, or an empty array.
        """
        # NOTE: 'posibilities' is a historic misspelling of 'possibilities';
        # kept as-is since this is a purely internal name.
        @memoize_default([], evaluator_is_first_arg=True)
        def get_posibilities(evaluator, module, func_name):
            try:
                names = module.used_names[func_name]
            except KeyError:
                # ``func_name`` never appears in this module: nothing to do.
                return []

            for name in names:
                parent = name.parent
                if pr.is_node(parent, 'trailer'):
                    parent = parent.parent

                # Find the '(' trailer that follows ``name`` — i.e. an
                # actual call of that name rather than a bare reference.
                trailer = None
                if pr.is_node(parent, 'power'):
                    for t in parent.children[1:]:
                        if t == '**':
                            # '**' binds tighter; anything after it is not a
                            # call trailer of ``name``.
                            break
                        if t.start_pos > name.start_pos and t.children[0] == '(':
                            trailer = t
                            break
                if trailer is not None:
                    types = evaluator.goto_definition(name)

                    # We have to remove decorators, because they are not the
                    # "original" functions, this way we can easily compare.
                    # At the same time we also have to remove InstanceElements.
                    undec = []
                    for escope in types:
                        if escope.isinstance(er.Function, er.Instance) \
                                and escope.decorates is not None:
                            undec.append(escope.decorates)
                        elif isinstance(escope, er.InstanceElement):
                            undec.append(escope.var)
                        else:
                            undec.append(escope)

                    if er.wrap(evaluator, compare) in undec:
                        # Only if we have the correct function we execute
                        # it, otherwise just ignore it.
                        evaluator.eval_trailer(types, trailer)
            # Params recorded by the listener attached below (closure).
            return listener.param_possibilities
        return get_posibilities(evaluator, module, func_name)

    current_module = func.get_parent_until()
    func_name = unicode(func.name)
    compare = func
    if func_name == '__init__':
        # Callers write ``Cls(...)``, not ``Cls.__init__(...)``, so search
        # for the class name and compare against the class itself.
        cls = func.get_parent_scope()
        if isinstance(cls, pr.Class):
            func_name = unicode(cls.name)
            compare = cls

    # add the listener
    listener = ParamListener()
    func.listeners.add(listener)

    try:
        result = []
        # This is like backtracking: Get the first possible result.
        for mod in imports.get_modules_containing_name(evaluator, [current_module], func_name):
            result = get_params_for_module(mod)
            if result:
                break
    finally:
        # cleanup: remove the listener; important: should not stick.
        func.listeners.remove(listener)
    return result
# completion here foo(1) There's no doubt wheter", "> name.start_pos and t.children[0] == '(': trailer = t break", "for all calls to ``func`` to find out what ``foo``", "they are not the # \"original\" functions, this way we", "ignore it. evaluator.eval_trailer(types, trailer) return listener.param_possibilities return get_posibilities(evaluator, module, func_name)", "in names)) debug.dbg('Dynamic param result %s', result) return result @memoize_default([],", "what a human would expect. It works as follows: -", "try to complete a type: >>> def func(foo): ... foo", "Evaluate the ExecutedParams to types. result = list(chain.from_iterable(n.parent.eval(evaluator) for n", "from jedi.evaluate import imports class ParamListener(object): \"\"\" This listener is", "functions, this way we can easily compare. # At the", "= unicode(func.name) compare = func if func_name == '__init__': cls", "as pr from jedi import settings from jedi import debug", "as er def get_params_for_module(module): \"\"\" Returns the values of a", "ParamListener() func.listeners.add(listener) try: result = [] # This is like", "to look for all calls to ``func`` to find out", "\"\"\" if not settings.dynamic_params: return [] debug.dbg('Dynamic param search for", "[n for n in search_function_call(evaluator, func) if n.value == param.name.value]", "important features of |jedi| is to have an option to", "|Jedi| sees a param - search for function calls named", "``func`` to find out what ``foo`` possibly is. \"\"\" if", "happen? Well, we'll just show both. Because that's what a", "and check the input. This work with a ``ParamListener``. \"\"\"", "no doubt wheter bar is an ``int`` or not, but", "params for a function. \"\"\" def __init__(self): self.param_possibilities = []", "if isinstance(cls, pr.Class): func_name = unicode(cls.name) compare = cls #", "is. 
\"\"\" if not settings.dynamic_params: return [] debug.dbg('Dynamic param search", "\\ and escope.decorates is not None: undec.append(escope.decorates) elif isinstance(escope, er.InstanceElement):", "what ``foo`` possibly is. \"\"\" if not settings.dynamic_params: return []", "def foo(bar): bar. # completion here foo(1) There's no doubt", "easily compare. # At the same time we also have", "name in names: parent = name.parent if pr.is_node(parent, 'trailer'): parent", "and escope.decorates is not None: undec.append(escope.decorates) elif isinstance(escope, er.InstanceElement): undec.append(escope.var)", "have to look for all calls to ``func`` to find", "= unicode(cls.name) compare = cls # add the listener listener", "values of a param, or an empty array. \"\"\" @memoize_default([],", "really important features of |jedi| is to have an option", "there's also a call like ``foo('str')``, what would happen? Well,", "if pr.is_node(parent, 'power'): for t in parent.children[1:]: if t ==", "trailer = t break if trailer is not None: types", "execute these calls and check the input. This work with", "result. for mod in imports.get_modules_containing_name(evaluator, [current_module], func_name): result = get_params_for_module(mod)", "to types. result = list(chain.from_iterable(n.parent.eval(evaluator) for n in names)) debug.dbg('Dynamic", "and t.children[0] == '(': trailer = t break if trailer", "check the input. This work with a ``ParamListener``. \"\"\" from", "for function calls named ``foo`` - execute these calls and", "like this:: def foo(bar): bar. # completion here foo(1) There's", "names)) debug.dbg('Dynamic param result %s', result) return result @memoize_default([], evaluator_is_first_arg=True)", "# This is like backtracking: Get the first possible result.", "jedi.evaluate import representation as er def get_params_for_module(module): \"\"\" Returns the", "to find out what ``foo`` possibly is. 
\"\"\" if not", "evaluator_is_first_arg=True) def search_function_call(evaluator, func): \"\"\" Returns a list of param", "\"\"\" Returns the values of a param, or an empty", "param.get_parent_until(pr.Function) # Compare the param names. names = [n for", "pr from jedi import settings from jedi import debug from", "break finally: # cleanup: remove the listener; important: should not", "if n.value == param.name.value] # Evaluate the ExecutedParams to types.", "without analysing the whole code. You have to look for", "= [] def execute(self, params): self.param_possibilities += params @debug.increase_indent def", "compare) in undec: # Only if we have the correct", "func(\"\") It is not known what the type ``foo`` without", "def get_params_for_module(module): \"\"\" Returns the values of a param, or", "param, or an empty array. \"\"\" @memoize_default([], evaluator_is_first_arg=True) def get_posibilities(evaluator,", "the # \"original\" functions, this way we can easily compare.", "to get the params for a function. \"\"\" def __init__(self):", "names: parent = name.parent if pr.is_node(parent, 'trailer'): parent = parent.parent", "from jedi import debug from jedi.evaluate.cache import memoize_default from jedi.evaluate", "an empty array. \"\"\" @memoize_default([], evaluator_is_first_arg=True) def get_posibilities(evaluator, module, func_name):", "InstanceElements. undec = [] for escope in types: if escope.isinstance(er.Function,", "func_name = unicode(cls.name) compare = cls # add the listener", "we also have to remove InstanceElements. undec = [] for", "|jedi| is to have an option to understand code like", "[] for escope in types: if escope.isinstance(er.Function, er.Instance) \\ and", "import memoize_default from jedi.evaluate import imports class ParamListener(object): \"\"\" This", "backtracking: Get the first possible result. 
for mod in imports.get_modules_containing_name(evaluator,", "func.get_parent_scope() if isinstance(cls, pr.Class): func_name = unicode(cls.name) compare = cls", "of a param, or an empty array. \"\"\" @memoize_default([], evaluator_is_first_arg=True)", "the first possible result. for mod in imports.get_modules_containing_name(evaluator, [current_module], func_name):", "that's what a human would expect. It works as follows:", "param.name.value] # Evaluate the ExecutedParams to types. result = list(chain.from_iterable(n.parent.eval(evaluator)", "you try to complete a type: >>> def func(foo): ...", "params @debug.increase_indent def search_params(evaluator, param): \"\"\" A dynamic search for", "bar is an ``int`` or not, but if there's also", "\"\"\" This listener is used to get the params for", "func(1) >>> func(\"\") It is not known what the type", "n in search_function_call(evaluator, func) if n.value == param.name.value] # Evaluate", "get_params_for_module(mod) if result: break finally: # cleanup: remove the listener;", "in search_function_call(evaluator, func) if n.value == param.name.value] # Evaluate the", "get the params for a function. \"\"\" def __init__(self): self.param_possibilities", "calls and check the input. This work with a ``ParamListener``.", "``foo`` without analysing the whole code. You have to look", "option to understand code like this:: def foo(bar): bar. #", "func(foo): ... foo >>> func(1) >>> func(\"\") It is not", "func): \"\"\" Returns a list of param names. \"\"\" from", "correct function we execute # it, otherwise just ignore it.", "None: types = evaluator.goto_definition(name) # We have to remove decorators,", "all calls to ``func`` to find out what ``foo`` possibly", "to ``func`` to find out what ``foo`` possibly is. 
\"\"\"", "module.used_names[func_name] except KeyError: return [] for name in names: parent", "debug.dbg('Dynamic param result %s', result) return result @memoize_default([], evaluator_is_first_arg=True) def", "if trailer is not None: types = evaluator.goto_definition(name) # We", "completion here foo(1) There's no doubt wheter bar is an", "for escope in types: if escope.isinstance(er.Function, er.Instance) \\ and escope.decorates", "a param, or an empty array. \"\"\" @memoize_default([], evaluator_is_first_arg=True) def", "the param names. names = [n for n in search_function_call(evaluator,", "sees a param - search for function calls named ``foo``" ]
[ "ID (either numerical ID or vanity url: steamcommunity.com/id/moird :return: Json", "url: steamcommunity.com/id/moird :return: Json object that contains listing of all", "= {} for game in user.games: linux = False winehq", "= False if str(game.id) in linux_games: linux = True if", "game.name, \"linux\": linux, \"winehq\":winehq} except Exception as e: process_report['error'] =", "as a fallback will be taken out, really don't want", "if str(game.id) in linux_games: linux = True if game.name in", "{} for game in user.games: linux = False winehq =", "to do this. user = steamapi.user.SteamUser(userurl=name) process_report['steamuser'] = user.name process_report['image']", "os import steamapi import json @app.route('/') def index(): return render_template(\"index.html\")", "linux, \"winehq\":winehq} except Exception as e: process_report['error'] = e return", "\"steam user image url\", \"games\": [{'gametitle', {\"linux\":true}}] \"error\": \"\" }", "linux_game_list = '/app/assets/GAMES.json' winehq_list = '/app/assets/winehq.json' else: linux_game_list = './assets/GAMES.json'", "fallback will be taken out, really don't want to do", "# When we get further this as a fallback will", "user.name process_report['image'] = user.avatar process_report['games'] = {} for game in", "\"games\": [{'gametitle', {\"linux\":true}}] \"error\": \"\" } \"\"\" process_report = {}", "if game.name in winehq_apps: winehq = winehq_apps[game.name] process_report['games'][game.id] = {\"name\":", "winehq_apps = json.load(winehq_raw) steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user = steamapi.user.SteamUser(userid=int(name))", "\"error\": \"\" } \"\"\" process_report = {} try: # See", "import os import steamapi import json @app.route('/') def index(): return", "will be taken out, really don't want to do this.", "False if str(game.id) in linux_games: linux = True if game.name", "\"\" } \"\"\" process_report = {} try: # See if", "do this. 
user = steamapi.user.SteamUser(userurl=name) process_report['steamuser'] = user.name process_report['image'] =", "'/app/assets/GAMES.json' winehq_list = '/app/assets/winehq.json' else: linux_game_list = './assets/GAMES.json' winehq_list =", "steamapi.user.SteamUser(userurl=name) process_report['steamuser'] = user.name process_report['image'] = user.avatar process_report['games'] = {}", "user.games: linux = False winehq = False if str(game.id) in", "ID. Returns JSON :param name: Steam ID (either numerical ID", "winehq = False if str(game.id) in linux_games: linux = True", "and general information about them: { \"steamuser\": \"real steam name\",", "user = steamapi.user.SteamUser(userid=int(name)) except ValueError: # When we get further", "about them: { \"steamuser\": \"real steam name\", \"image\": \"steam user", "@app.route('/report/<name>') def report(name=None): \"\"\" This will generate the report based", "JSON :param name: Steam ID (either numerical ID or vanity", "image url\", \"games\": [{'gametitle', {\"linux\":true}}] \"error\": \"\" } \"\"\" process_report", "winehq_raw: winehq_apps = json.load(winehq_raw) steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user =", "set an environment variable for this as well. if os.path.exists('/app/assets/GAMES.json'):", "json.load(winehq_raw) steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user = steamapi.user.SteamUser(userid=int(name)) except ValueError:", "process_report['image'] = user.avatar process_report['games'] = {} for game in user.games:", "render_template import os import steamapi import json @app.route('/') def index():", "for this as well. if os.path.exists('/app/assets/GAMES.json'): linux_game_list = '/app/assets/GAMES.json' winehq_list", "= json.load(linux_game_list_raw) with open(winehq_list) as winehq_raw: winehq_apps = json.load(winehq_raw) steam_connection", "really don't want to do this. 
user = steamapi.user.SteamUser(userurl=name) process_report['steamuser']", "render_template(\"index.html\") @app.route('/report/<name>') def report(name=None): \"\"\" This will generate the report", "Steam ID (either numerical ID or vanity url: steamcommunity.com/id/moird :return:", "\"linux\": linux, \"winehq\":winehq} except Exception as e: process_report['error'] = e", "= steamapi.user.SteamUser(userid=int(name)) except ValueError: # When we get further this", "winehq_apps: winehq = winehq_apps[game.name] process_report['games'][game.id] = {\"name\": game.name, \"linux\": linux,", "steamapi.user.SteamUser(userid=int(name)) except ValueError: # When we get further this as", "def report(name=None): \"\"\" This will generate the report based on", "= '/app/assets/GAMES.json' winehq_list = '/app/assets/winehq.json' else: linux_game_list = './assets/GAMES.json' winehq_list", "heroku or not. Could probably set an environment variable for", "True if game.name in winehq_apps: winehq = winehq_apps[game.name] process_report['games'][game.id] =", "= '/app/assets/winehq.json' else: linux_game_list = './assets/GAMES.json' winehq_list = './assets/winehq.json' with", "[{'gametitle', {\"linux\":true}}] \"error\": \"\" } \"\"\" process_report = {} try:", "When we get further this as a fallback will be", "import jsonify, render_template import os import steamapi import json @app.route('/')", "'./assets/GAMES.json' winehq_list = './assets/winehq.json' with open(linux_game_list) as linux_game_list_raw: linux_games =", "name: Steam ID (either numerical ID or vanity url: steamcommunity.com/id/moird", "object that contains listing of all linux games and general", "out, really don't want to do this. user = steamapi.user.SteamUser(userurl=name)", "based on the users Steam ID. Returns JSON :param name:", "this as well. 
if os.path.exists('/app/assets/GAMES.json'): linux_game_list = '/app/assets/GAMES.json' winehq_list =", "{\"linux\":true}}] \"error\": \"\" } \"\"\" process_report = {} try: #", ":param name: Steam ID (either numerical ID or vanity url:", "\"\"\" process_report = {} try: # See if we are", "= './assets/GAMES.json' winehq_list = './assets/winehq.json' with open(linux_game_list) as linux_game_list_raw: linux_games", "are running on heroku or not. Could probably set an", "we are running on heroku or not. Could probably set", "\"winehq\":winehq} except Exception as e: process_report['error'] = e return jsonify(**process_report)", "= True if game.name in winehq_apps: winehq = winehq_apps[game.name] process_report['games'][game.id]", "that contains listing of all linux games and general information", "them: { \"steamuser\": \"real steam name\", \"image\": \"steam user image", "for game in user.games: linux = False winehq = False", "environment variable for this as well. if os.path.exists('/app/assets/GAMES.json'): linux_game_list =", "This will generate the report based on the users Steam", "} \"\"\" process_report = {} try: # See if we", "name\", \"image\": \"steam user image url\", \"games\": [{'gametitle', {\"linux\":true}}] \"error\":", "numerical ID or vanity url: steamcommunity.com/id/moird :return: Json object that", "linux_game_list_raw: linux_games = json.load(linux_game_list_raw) with open(winehq_list) as winehq_raw: winehq_apps =", "report(name=None): \"\"\" This will generate the report based on the", "or vanity url: steamcommunity.com/id/moird :return: Json object that contains listing", "{ \"steamuser\": \"real steam name\", \"image\": \"steam user image url\",", "jsonify, render_template import os import steamapi import json @app.route('/') def", "listing of all linux games and general information about them:", "process_report = {} try: # See if we are running", "= user.avatar process_report['games'] = {} for game in user.games: linux", "linux_games: linux 
= True if game.name in winehq_apps: winehq =", "import app from flask import jsonify, render_template import os import", "if os.path.exists('/app/assets/GAMES.json'): linux_game_list = '/app/assets/GAMES.json' winehq_list = '/app/assets/winehq.json' else: linux_game_list", "= steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user = steamapi.user.SteamUser(userid=int(name)) except ValueError: # When", "vanity url: steamcommunity.com/id/moird :return: Json object that contains listing of", "we get further this as a fallback will be taken", "contains listing of all linux games and general information about", "= False winehq = False if str(game.id) in linux_games: linux", "on heroku or not. Could probably set an environment variable", "def index(): return render_template(\"index.html\") @app.route('/report/<name>') def report(name=None): \"\"\" This will", "probably set an environment variable for this as well. if", "os.path.exists('/app/assets/GAMES.json'): linux_game_list = '/app/assets/GAMES.json' winehq_list = '/app/assets/winehq.json' else: linux_game_list =", "if we are running on heroku or not. Could probably", "user.avatar process_report['games'] = {} for game in user.games: linux =", "open(winehq_list) as winehq_raw: winehq_apps = json.load(winehq_raw) steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try:", "information about them: { \"steamuser\": \"real steam name\", \"image\": \"steam", "False winehq = False if str(game.id) in linux_games: linux =", "\"\"\" This will generate the report based on the users", "# See if we are running on heroku or not.", "str(game.id) in linux_games: linux = True if game.name in winehq_apps:", "{\"name\": game.name, \"linux\": linux, \"winehq\":winehq} except Exception as e: process_report['error']", "want to do this. 
user = steamapi.user.SteamUser(userurl=name) process_report['steamuser'] = user.name", "import json @app.route('/') def index(): return render_template(\"index.html\") @app.route('/report/<name>') def report(name=None):", "winehq = winehq_apps[game.name] process_report['games'][game.id] = {\"name\": game.name, \"linux\": linux, \"winehq\":winehq}", "taken out, really don't want to do this. user =", "users Steam ID. Returns JSON :param name: Steam ID (either", "Steam ID. Returns JSON :param name: Steam ID (either numerical", "steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user = steamapi.user.SteamUser(userid=int(name)) except ValueError: #", "= steamapi.user.SteamUser(userurl=name) process_report['steamuser'] = user.name process_report['image'] = user.avatar process_report['games'] =", "process_report['games'] = {} for game in user.games: linux = False", "with open(linux_game_list) as linux_game_list_raw: linux_games = json.load(linux_game_list_raw) with open(winehq_list) as", "(either numerical ID or vanity url: steamcommunity.com/id/moird :return: Json object", "in winehq_apps: winehq = winehq_apps[game.name] process_report['games'][game.id] = {\"name\": game.name, \"linux\":", "= {\"name\": game.name, \"linux\": linux, \"winehq\":winehq} except Exception as e:", "general information about them: { \"steamuser\": \"real steam name\", \"image\":", "games and general information about them: { \"steamuser\": \"real steam", "open(linux_game_list) as linux_game_list_raw: linux_games = json.load(linux_game_list_raw) with open(winehq_list) as winehq_raw:", "= './assets/winehq.json' with open(linux_game_list) as linux_game_list_raw: linux_games = json.load(linux_game_list_raw) with", "json.load(linux_game_list_raw) with open(winehq_list) as winehq_raw: winehq_apps = json.load(winehq_raw) steam_connection =", "from steamcheck import app from flask import jsonify, render_template import", "= user.name process_report['image'] = 
user.avatar process_report['games'] = {} for game", "try: user = steamapi.user.SteamUser(userid=int(name)) except ValueError: # When we get", "'/app/assets/winehq.json' else: linux_game_list = './assets/GAMES.json' winehq_list = './assets/winehq.json' with open(linux_game_list)", "linux_games = json.load(linux_game_list_raw) with open(winehq_list) as winehq_raw: winehq_apps = json.load(winehq_raw)", "ValueError: # When we get further this as a fallback", "steamapi import json @app.route('/') def index(): return render_template(\"index.html\") @app.route('/report/<name>') def", "user image url\", \"games\": [{'gametitle', {\"linux\":true}}] \"error\": \"\" } \"\"\"", "with open(winehq_list) as winehq_raw: winehq_apps = json.load(winehq_raw) steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key'])", "or not. Could probably set an environment variable for this", "linux = False winehq = False if str(game.id) in linux_games:", "Returns JSON :param name: Steam ID (either numerical ID or", "game.name in winehq_apps: winehq = winehq_apps[game.name] process_report['games'][game.id] = {\"name\": game.name,", "Json object that contains listing of all linux games and", "not. Could probably set an environment variable for this as", "in user.games: linux = False winehq = False if str(game.id)", "will generate the report based on the users Steam ID.", "get further this as a fallback will be taken out,", "linux_game_list = './assets/GAMES.json' winehq_list = './assets/winehq.json' with open(linux_game_list) as linux_game_list_raw:", "except ValueError: # When we get further this as a", "on the users Steam ID. Returns JSON :param name: Steam", "as well. if os.path.exists('/app/assets/GAMES.json'): linux_game_list = '/app/assets/GAMES.json' winehq_list = '/app/assets/winehq.json'", "report based on the users Steam ID. 
Returns JSON :param", "as winehq_raw: winehq_apps = json.load(winehq_raw) steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user", "of all linux games and general information about them: {", "json @app.route('/') def index(): return render_template(\"index.html\") @app.route('/report/<name>') def report(name=None): \"\"\"", ":return: Json object that contains listing of all linux games", "winehq_list = '/app/assets/winehq.json' else: linux_game_list = './assets/GAMES.json' winehq_list = './assets/winehq.json'", "all linux games and general information about them: { \"steamuser\":", "linux = True if game.name in winehq_apps: winehq = winehq_apps[game.name]", "winehq_list = './assets/winehq.json' with open(linux_game_list) as linux_game_list_raw: linux_games = json.load(linux_game_list_raw)", "an environment variable for this as well. if os.path.exists('/app/assets/GAMES.json'): linux_game_list", "import steamapi import json @app.route('/') def index(): return render_template(\"index.html\") @app.route('/report/<name>')", "user = steamapi.user.SteamUser(userurl=name) process_report['steamuser'] = user.name process_report['image'] = user.avatar process_report['games']", "linux games and general information about them: { \"steamuser\": \"real", "See if we are running on heroku or not. Could", "well. 
if os.path.exists('/app/assets/GAMES.json'): linux_game_list = '/app/assets/GAMES.json' winehq_list = '/app/assets/winehq.json' else:", "from flask import jsonify, render_template import os import steamapi import", "steam name\", \"image\": \"steam user image url\", \"games\": [{'gametitle', {\"linux\":true}}]", "\"steamuser\": \"real steam name\", \"image\": \"steam user image url\", \"games\":", "return render_template(\"index.html\") @app.route('/report/<name>') def report(name=None): \"\"\" This will generate the", "index(): return render_template(\"index.html\") @app.route('/report/<name>') def report(name=None): \"\"\" This will generate", "app from flask import jsonify, render_template import os import steamapi", "@app.route('/') def index(): return render_template(\"index.html\") @app.route('/report/<name>') def report(name=None): \"\"\" This", "steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user = steamapi.user.SteamUser(userid=int(name)) except ValueError: # When we", "process_report['steamuser'] = user.name process_report['image'] = user.avatar process_report['games'] = {} for", "the users Steam ID. Returns JSON :param name: Steam ID", "\"real steam name\", \"image\": \"steam user image url\", \"games\": [{'gametitle',", "don't want to do this. user = steamapi.user.SteamUser(userurl=name) process_report['steamuser'] =", "= json.load(winehq_raw) steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user = steamapi.user.SteamUser(userid=int(name)) except", "{} try: # See if we are running on heroku", "= {} try: # See if we are running on", "this as a fallback will be taken out, really don't", "winehq_apps[game.name] process_report['games'][game.id] = {\"name\": game.name, \"linux\": linux, \"winehq\":winehq} except Exception", "the report based on the users Steam ID. 
Returns JSON", "steamcheck import app from flask import jsonify, render_template import os", "steamcommunity.com/id/moird :return: Json object that contains listing of all linux", "be taken out, really don't want to do this. user", "generate the report based on the users Steam ID. Returns", "running on heroku or not. Could probably set an environment", "a fallback will be taken out, really don't want to", "else: linux_game_list = './assets/GAMES.json' winehq_list = './assets/winehq.json' with open(linux_game_list) as", "as linux_game_list_raw: linux_games = json.load(linux_game_list_raw) with open(winehq_list) as winehq_raw: winehq_apps", "this. user = steamapi.user.SteamUser(userurl=name) process_report['steamuser'] = user.name process_report['image'] = user.avatar", "'./assets/winehq.json' with open(linux_game_list) as linux_game_list_raw: linux_games = json.load(linux_game_list_raw) with open(winehq_list)", "game in user.games: linux = False winehq = False if", "in linux_games: linux = True if game.name in winehq_apps: winehq", "= winehq_apps[game.name] process_report['games'][game.id] = {\"name\": game.name, \"linux\": linux, \"winehq\":winehq} except", "process_report['games'][game.id] = {\"name\": game.name, \"linux\": linux, \"winehq\":winehq} except Exception as", "further this as a fallback will be taken out, really", "variable for this as well. if os.path.exists('/app/assets/GAMES.json'): linux_game_list = '/app/assets/GAMES.json'", "ID or vanity url: steamcommunity.com/id/moird :return: Json object that contains", "\"image\": \"steam user image url\", \"games\": [{'gametitle', {\"linux\":true}}] \"error\": \"\"", "url\", \"games\": [{'gametitle', {\"linux\":true}}] \"error\": \"\" } \"\"\" process_report =", "try: # See if we are running on heroku or", "Could probably set an environment variable for this as well.", "flask import jsonify, render_template import os import steamapi import json" ]
[ "This module should contain a function called `run_module`, that is", "import read_params from .validate import Validator def run_module(): \"\"\"Run the", "to call when running the tool. This module should contain", "from delphi_utils import read_params from .validate import Validator def run_module():", "module should contain a function called `run_module`, that is executed", "-*- \"\"\"Functions to call when running the tool. This module", "function called `run_module`, that is executed when the module is", "a function called `run_module`, that is executed when the module", "import Validator def run_module(): \"\"\"Run the validator as a module.\"\"\"", "the tool. This module should contain a function called `run_module`,", "the module is run with `python -m delphi_validator`. \"\"\" from", "as a module.\"\"\" parent_params = read_params() params = parent_params['validation'] validator", "is run with `python -m delphi_validator`. \"\"\" from delphi_utils import", "Validator def run_module(): \"\"\"Run the validator as a module.\"\"\" parent_params", "`run_module`, that is executed when the module is run with", "read_params from .validate import Validator def run_module(): \"\"\"Run the validator", "from .validate import Validator def run_module(): \"\"\"Run the validator as", "utf-8 -*- \"\"\"Functions to call when running the tool. This", "delphi_utils import read_params from .validate import Validator def run_module(): \"\"\"Run", "with `python -m delphi_validator`. \"\"\" from delphi_utils import read_params from", "module.\"\"\" parent_params = read_params() params = parent_params['validation'] validator = Validator(params)", "parent_params = read_params() params = parent_params['validation'] validator = Validator(params) validator.validate(parent_params[\"export_dir\"]).print_and_exit()", "that is executed when the module is run with `python", "\"\"\"Functions to call when running the tool. 
This module should", "run_module(): \"\"\"Run the validator as a module.\"\"\" parent_params = read_params()", "is executed when the module is run with `python -m", "tool. This module should contain a function called `run_module`, that", "a module.\"\"\" parent_params = read_params() params = parent_params['validation'] validator =", "validator as a module.\"\"\" parent_params = read_params() params = parent_params['validation']", "when running the tool. This module should contain a function", "-*- coding: utf-8 -*- \"\"\"Functions to call when running the", "\"\"\"Run the validator as a module.\"\"\" parent_params = read_params() params", ".validate import Validator def run_module(): \"\"\"Run the validator as a", "contain a function called `run_module`, that is executed when the", "executed when the module is run with `python -m delphi_validator`.", "# -*- coding: utf-8 -*- \"\"\"Functions to call when running", "run with `python -m delphi_validator`. \"\"\" from delphi_utils import read_params", "the validator as a module.\"\"\" parent_params = read_params() params =", "called `run_module`, that is executed when the module is run", "\"\"\" from delphi_utils import read_params from .validate import Validator def", "when the module is run with `python -m delphi_validator`. \"\"\"", "`python -m delphi_validator`. \"\"\" from delphi_utils import read_params from .validate", "delphi_validator`. \"\"\" from delphi_utils import read_params from .validate import Validator", "call when running the tool. This module should contain a", "<reponame>benjaminysmith/covidcast-indicators<filename>validator/delphi_validator/run.py # -*- coding: utf-8 -*- \"\"\"Functions to call when", "should contain a function called `run_module`, that is executed when", "coding: utf-8 -*- \"\"\"Functions to call when running the tool.", "def run_module(): \"\"\"Run the validator as a module.\"\"\" parent_params =", "running the tool. 
This module should contain a function called", "-m delphi_validator`. \"\"\" from delphi_utils import read_params from .validate import", "module is run with `python -m delphi_validator`. \"\"\" from delphi_utils" ]
[ "self.transform([img], depth, None); #this depth is just used to fill", "a images and a numpy array which can be None", "transform=None): self.root = Path(root) scene_list_path = self.root/'val.txt' self.scenes = [self.root/folder[:-1]", "scene_list_path = self.root/'val.txt' self.scenes = [self.root/folder[:-1] for folder in open(scene_list_path)]", "self.depth = crawl_folders(self.scenes) self.transform = transform def __getitem__(self, index): img", "as np from imageio import imread from path import Path", "def __getitem__(self, index): img = load_as_float(self.imgs[index]) depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace()", "depth is just used to fill the compose transform that", "depth.extend(current_depth) return imgs, depth def load_as_float(path): return imread(path).astype(np.float32) class ValidationSet(data.Dataset):", "in a list a images and a numpy array which", "in open(scene_list_path)] self.imgs, self.depth = crawl_folders(self.scenes) self.transform = transform def", "= crawl_folders(self.scenes) self.transform = transform def __getitem__(self, index): img =", "self.transform = transform def __getitem__(self, index): img = load_as_float(self.imgs[index]) depth", "self.transform is not None: img, _, _ = self.transform([img], depth,", "imgs = [] depth = [] for folder in folders_list:", "files are arranged in this way: root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy", "from imageio import imread from path import Path import pdb", "imgs.extend(current_imgs) depth.extend(current_depth) return imgs, depth def load_as_float(path): return imread(path).astype(np.float32) class", "and a numpy array which can be None \"\"\" def", "imread from path import Path import pdb def crawl_folders(folders_list): imgs", "img.dirname()/(img.name[:-4] + '.npy') assert(d.isfile()), \"depth file {} not found\".format(str(d)) depth.append(d)", "import imread from path import Path import pdb def 
crawl_folders(folders_list):", "root, transform=None): self.root = Path(root) scene_list_path = self.root/'val.txt' self.scenes =", "is just used to fill the compose transform that is", "_ = self.transform([img], depth, None); #this depth is just used", "#this depth is just used to fill the compose transform", "img in current_imgs: d = img.dirname()/(img.name[:-4] + '.npy') assert(d.isfile()), \"depth", "in folders_list: current_imgs = sorted(folder.files('*.jpg')) current_depth = [] for img", "root/scene_1/0000001.jpg root/scene_1/0000001.npy .. root/scene_2/0000000.jpg root/scene_2/0000000.npy . transform functions must take", "not None: img, _, _ = self.transform([img], depth, None); #this", "load_as_float(path): return imread(path).astype(np.float32) class ValidationSet(data.Dataset): \"\"\"A sequence data loader where", "+ '.npy') assert(d.isfile()), \"depth file {} not found\".format(str(d)) depth.append(d) imgs.extend(current_imgs)", "def __init__(self, root, transform=None): self.root = Path(root) scene_list_path = self.root/'val.txt'", "be None \"\"\" def __init__(self, root, transform=None): self.root = Path(root)", "= self.transform([img], depth, None); #this depth is just used to", "for folder in open(scene_list_path)] self.imgs, self.depth = crawl_folders(self.scenes) self.transform =", "can be None \"\"\" def __init__(self, root, transform=None): self.root =", "result) img = img[0] return img, depth def __len__(self): return", "arranged in this way: root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy .. 
root/scene_2/0000000.jpg", "def load_as_float(path): return imread(path).astype(np.float32) class ValidationSet(data.Dataset): \"\"\"A sequence data loader", "the files are arranged in this way: root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg", "path import Path import pdb def crawl_folders(folders_list): imgs = []", "the result) img = img[0] return img, depth def __len__(self):", "import Path import pdb def crawl_folders(folders_list): imgs = [] depth", "folder in open(scene_list_path)] self.imgs, self.depth = crawl_folders(self.scenes) self.transform = transform", "crawl_folders(self.scenes) self.transform = transform def __getitem__(self, index): img = load_as_float(self.imgs[index])", "take in a list a images and a numpy array", "in current_imgs: d = img.dirname()/(img.name[:-4] + '.npy') assert(d.isfile()), \"depth file", "data import numpy as np from imageio import imread from", "not found\".format(str(d)) depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth) return imgs, depth def load_as_float(path):", "numpy array which can be None \"\"\" def __init__(self, root,", "imageio import imread from path import Path import pdb def", "root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy .. root/scene_2/0000000.jpg root/scene_2/0000000.npy . transform functions", "file {} not found\".format(str(d)) depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth) return imgs, depth", "imgs, depth def load_as_float(path): return imread(path).astype(np.float32) class ValidationSet(data.Dataset): \"\"\"A sequence", "the compose transform that is shared(no need for the result)", ".. root/scene_2/0000000.jpg root/scene_2/0000000.npy . 
transform functions must take in a", "Path(root) scene_list_path = self.root/'val.txt' self.scenes = [self.root/folder[:-1] for folder in", "= np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if self.transform is not None: img, _,", "#;pdb.set_trace() if self.transform is not None: img, _, _ =", "folders_list: current_imgs = sorted(folder.files('*.jpg')) current_depth = [] for img in", "'.npy') assert(d.isfile()), \"depth file {} not found\".format(str(d)) depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth)", "if self.transform is not None: img, _, _ = self.transform([img],", "list a images and a numpy array which can be", "a numpy array which can be None \"\"\" def __init__(self,", "transform that is shared(no need for the result) img =", "= [] for folder in folders_list: current_imgs = sorted(folder.files('*.jpg')) current_depth", "array which can be None \"\"\" def __init__(self, root, transform=None):", "in this way: root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy .. root/scene_2/0000000.jpg root/scene_2/0000000.npy", "need for the result) img = img[0] return img, depth", "transform functions must take in a list a images and", "\"\"\" def __init__(self, root, transform=None): self.root = Path(root) scene_list_path =", ". 
transform functions must take in a list a images", "which can be None \"\"\" def __init__(self, root, transform=None): self.root", "class ValidationSet(data.Dataset): \"\"\"A sequence data loader where the files are", "[] for folder in folders_list: current_imgs = sorted(folder.files('*.jpg')) current_depth =", "= self.root/'val.txt' self.scenes = [self.root/folder[:-1] for folder in open(scene_list_path)] self.imgs,", "= [] for img in current_imgs: d = img.dirname()/(img.name[:-4] +", "[] depth = [] for folder in folders_list: current_imgs =", "np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if self.transform is not None: img, _, _", "None: img, _, _ = self.transform([img], depth, None); #this depth", "self.root/'val.txt' self.scenes = [self.root/folder[:-1] for folder in open(scene_list_path)] self.imgs, self.depth", "depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if self.transform is not None: img,", "= [] depth = [] for folder in folders_list: current_imgs", "sequence data loader where the files are arranged in this", "root/scene_1/0000001.npy .. root/scene_2/0000000.jpg root/scene_2/0000000.npy . transform functions must take in", "to fill the compose transform that is shared(no need for", "found\".format(str(d)) depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth) return imgs, depth def load_as_float(path): return", "root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy .. root/scene_2/0000000.jpg root/scene_2/0000000.npy . transform functions must", "folder in folders_list: current_imgs = sorted(folder.files('*.jpg')) current_depth = [] for", "depth, None); #this depth is just used to fill the", "way: root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy .. root/scene_2/0000000.jpg root/scene_2/0000000.npy . 
transform", "= transform def __getitem__(self, index): img = load_as_float(self.imgs[index]) depth =", "for the result) img = img[0] return img, depth def", "is not None: img, _, _ = self.transform([img], depth, None);", "compose transform that is shared(no need for the result) img", "np from imageio import imread from path import Path import", "where the files are arranged in this way: root/scene_1/0000000.jpg root/scene_1/0000000.npy", "functions must take in a list a images and a", "None); #this depth is just used to fill the compose", "import torch.utils.data as data import numpy as np from imageio", "imread(path).astype(np.float32) class ValidationSet(data.Dataset): \"\"\"A sequence data loader where the files", "root/scene_2/0000000.jpg root/scene_2/0000000.npy . transform functions must take in a list", "open(scene_list_path)] self.imgs, self.depth = crawl_folders(self.scenes) self.transform = transform def __getitem__(self,", "[] for img in current_imgs: d = img.dirname()/(img.name[:-4] + '.npy')", "def crawl_folders(folders_list): imgs = [] depth = [] for folder", "images and a numpy array which can be None \"\"\"", "[self.root/folder[:-1] for folder in open(scene_list_path)] self.imgs, self.depth = crawl_folders(self.scenes) self.transform", "depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth) return imgs, depth def load_as_float(path): return imread(path).astype(np.float32)", "from path import Path import pdb def crawl_folders(folders_list): imgs =", "numpy as np from imageio import imread from path import", "this way: root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy .. 
root/scene_2/0000000.jpg root/scene_2/0000000.npy .", "{} not found\".format(str(d)) depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth) return imgs, depth def", "torch.utils.data as data import numpy as np from imageio import", "__init__(self, root, transform=None): self.root = Path(root) scene_list_path = self.root/'val.txt' self.scenes", "\"depth file {} not found\".format(str(d)) depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth) return imgs,", "for folder in folders_list: current_imgs = sorted(folder.files('*.jpg')) current_depth = []", "= [self.root/folder[:-1] for folder in open(scene_list_path)] self.imgs, self.depth = crawl_folders(self.scenes)", "loader where the files are arranged in this way: root/scene_1/0000000.jpg", "= img.dirname()/(img.name[:-4] + '.npy') assert(d.isfile()), \"depth file {} not found\".format(str(d))", "__getitem__(self, index): img = load_as_float(self.imgs[index]) depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if", "fill the compose transform that is shared(no need for the", "must take in a list a images and a numpy", "crawl_folders(folders_list): imgs = [] depth = [] for folder in", "as data import numpy as np from imageio import imread", "d = img.dirname()/(img.name[:-4] + '.npy') assert(d.isfile()), \"depth file {} not", "depth def load_as_float(path): return imread(path).astype(np.float32) class ValidationSet(data.Dataset): \"\"\"A sequence data", "used to fill the compose transform that is shared(no need", "a list a images and a numpy array which can", "are arranged in this way: root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy ..", "\"\"\"A sequence data loader where the files are arranged in", "self.root = Path(root) scene_list_path = self.root/'val.txt' self.scenes = [self.root/folder[:-1] for", "current_depth = [] for img in current_imgs: d = img.dirname()/(img.name[:-4]", "_, _ = self.transform([img], depth, 
None); #this depth is just", "= load_as_float(self.imgs[index]) depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if self.transform is not", "img, _, _ = self.transform([img], depth, None); #this depth is", "img = load_as_float(self.imgs[index]) depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if self.transform is", "ValidationSet(data.Dataset): \"\"\"A sequence data loader where the files are arranged", "just used to fill the compose transform that is shared(no", "img = img[0] return img, depth def __len__(self): return len(self.imgs)", "load_as_float(self.imgs[index]) depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if self.transform is not None:", "return imread(path).astype(np.float32) class ValidationSet(data.Dataset): \"\"\"A sequence data loader where the", "for img in current_imgs: d = img.dirname()/(img.name[:-4] + '.npy') assert(d.isfile()),", "return imgs, depth def load_as_float(path): return imread(path).astype(np.float32) class ValidationSet(data.Dataset): \"\"\"A", "import pdb def crawl_folders(folders_list): imgs = [] depth = []", "= sorted(folder.files('*.jpg')) current_depth = [] for img in current_imgs: d", "assert(d.isfile()), \"depth file {} not found\".format(str(d)) depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth) return", "is shared(no need for the result) img = img[0] return", "current_imgs = sorted(folder.files('*.jpg')) current_depth = [] for img in current_imgs:", "self.scenes = [self.root/folder[:-1] for folder in open(scene_list_path)] self.imgs, self.depth =", "root/scene_2/0000000.npy . 
transform functions must take in a list a", "= Path(root) scene_list_path = self.root/'val.txt' self.scenes = [self.root/folder[:-1] for folder", "None \"\"\" def __init__(self, root, transform=None): self.root = Path(root) scene_list_path", "depth = [] for folder in folders_list: current_imgs = sorted(folder.files('*.jpg'))", "data loader where the files are arranged in this way:", "Path import pdb def crawl_folders(folders_list): imgs = [] depth =", "pdb def crawl_folders(folders_list): imgs = [] depth = [] for", "self.imgs, self.depth = crawl_folders(self.scenes) self.transform = transform def __getitem__(self, index):", "shared(no need for the result) img = img[0] return img,", "sorted(folder.files('*.jpg')) current_depth = [] for img in current_imgs: d =", "index): img = load_as_float(self.imgs[index]) depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if self.transform", "that is shared(no need for the result) img = img[0]", "import numpy as np from imageio import imread from path", "transform def __getitem__(self, index): img = load_as_float(self.imgs[index]) depth = np.load(self.depth[index]).astype(np.float32)", "current_imgs: d = img.dirname()/(img.name[:-4] + '.npy') assert(d.isfile()), \"depth file {}" ]
[ "key: is not used :param alphabet: is not used :type", "Text to decrypt :param key: is not used :param alphabet:", "decrypt(self, text, key=None, alphabet=None): \"\"\" Decryption method :param text: Text", "#!/usr/bin/python from .rot13 import Rot13 import secretpy.alphabets as al class", "Rot13() def __init__(self): alphabet = al.ENGLISH half = len(alphabet) >>", "alphabet[half:] + al.DECIMAL[5:] def __crypt(self, text, alphabet): return self.__rot13.encrypt(text, alphabet=self.__alphabet)", "encrypt :param key: is not used :param alphabet: is not", "string :type key: integer :type alphabet: string :return: text :rtype:", ":type key: integer :type alphabet: string :return: text :rtype: string", "= len(alphabet) >> 1 self.__alphabet = alphabet[:half] + al.DECIMAL[:5] +", "alphabet=None): \"\"\" Decryption method :param text: Text to decrypt :param", "len(alphabet) >> 1 self.__alphabet = alphabet[:half] + al.DECIMAL[:5] + alphabet[half:]", "decrypt :param key: is not used :param alphabet: is not", "\"\"\" Encryption method :param text: Text to encrypt :param key:", "text: string :type key: integer :type alphabet: string :return: text", "= alphabet[:half] + al.DECIMAL[:5] + alphabet[half:] + al.DECIMAL[5:] def __crypt(self,", "self.__crypt(text, self.__alphabet) def decrypt(self, text, key=None, alphabet=None): \"\"\" Decryption method", "alphabet=self.__alphabet) def encrypt(self, text, key=None, alphabet=None): \"\"\" Encryption method :param", ":type alphabet: string :return: text :rtype: string \"\"\" return self.__crypt(text,", "\"\"\" The Rot18 Cipher \"\"\" __rot13 = Rot13() def __init__(self):", "alphabet=None): \"\"\" Encryption method :param text: Text to encrypt :param", "self.__alphabet) def decrypt(self, text, key=None, alphabet=None): \"\"\" Decryption method :param", ":rtype: string \"\"\" return self.__crypt(text, self.__alphabet) def decrypt(self, text, key=None,", "Text to encrypt :param key: is not used :param alphabet:", "not used :type text: 
string :type key: integer :type alphabet:", "The Rot18 Cipher \"\"\" __rot13 = Rot13() def __init__(self): alphabet", "Rot18: \"\"\" The Rot18 Cipher \"\"\" __rot13 = Rot13() def", "import secretpy.alphabets as al class Rot18: \"\"\" The Rot18 Cipher", "is not used :type text: string :type key: integer :type", "Encryption method :param text: Text to encrypt :param key: is", "\"\"\" return self.__crypt(text, self.__alphabet) def decrypt(self, text, key=None, alphabet=None): \"\"\"", "\"\"\" __rot13 = Rot13() def __init__(self): alphabet = al.ENGLISH half", "= al.ENGLISH half = len(alphabet) >> 1 self.__alphabet = alphabet[:half]", "al.DECIMAL[:5] + alphabet[half:] + al.DECIMAL[5:] def __crypt(self, text, alphabet): return", ":return: text :rtype: string \"\"\" return self.__crypt(text, self.__alphabet) def get_fixed_alphabet(self):", "alphabet = al.ENGLISH half = len(alphabet) >> 1 self.__alphabet =", "string :return: text :rtype: string \"\"\" return self.__crypt(text, self.__alphabet) def", "return self.__crypt(text, self.__alphabet) def decrypt(self, text, key=None, alphabet=None): \"\"\" Decryption", "+ al.DECIMAL[5:] def __crypt(self, text, alphabet): return self.__rot13.encrypt(text, alphabet=self.__alphabet) def", "+ alphabet[half:] + al.DECIMAL[5:] def __crypt(self, text, alphabet): return self.__rot13.encrypt(text,", "__crypt(self, text, alphabet): return self.__rot13.encrypt(text, alphabet=self.__alphabet) def encrypt(self, text, key=None,", ">> 1 self.__alphabet = alphabet[:half] + al.DECIMAL[:5] + alphabet[half:] +", ".rot13 import Rot13 import secretpy.alphabets as al class Rot18: \"\"\"", "not used :param alphabet: is not used :type text: string", "import Rot13 import secretpy.alphabets as al class Rot18: \"\"\" The", "def encrypt(self, text, key=None, alphabet=None): \"\"\" Encryption method :param text:", "1 self.__alphabet = alphabet[:half] + al.DECIMAL[:5] + alphabet[half:] + al.DECIMAL[5:]", "is not used :param alphabet: is not used :type text:", 
"alphabet[:half] + al.DECIMAL[:5] + alphabet[half:] + al.DECIMAL[5:] def __crypt(self, text,", "key=None, alphabet=None): \"\"\" Decryption method :param text: Text to decrypt", "text: Text to decrypt :param key: is not used :param", "alphabet: string :return: text :rtype: string \"\"\" return self.__crypt(text, self.__alphabet)", "al.ENGLISH half = len(alphabet) >> 1 self.__alphabet = alphabet[:half] +", ":return: text :rtype: string \"\"\" return self.__crypt(text, self.__alphabet) def decrypt(self,", "Rot13 import secretpy.alphabets as al class Rot18: \"\"\" The Rot18", ":type text: string :type key: integer :type alphabet: string :return:", "integer :type alphabet: string :return: text :rtype: string \"\"\" return", ":param key: is not used :param alphabet: is not used", "self.__rot13.encrypt(text, alphabet=self.__alphabet) def encrypt(self, text, key=None, alphabet=None): \"\"\" Encryption method", "Cipher \"\"\" __rot13 = Rot13() def __init__(self): alphabet = al.ENGLISH", "alphabet): return self.__rot13.encrypt(text, alphabet=self.__alphabet) def encrypt(self, text, key=None, alphabet=None): \"\"\"", "method :param text: Text to encrypt :param key: is not", "string \"\"\" return self.__crypt(text, self.__alphabet) def decrypt(self, text, key=None, alphabet=None):", "Rot18 Cipher \"\"\" __rot13 = Rot13() def __init__(self): alphabet =", "encrypt(self, text, key=None, alphabet=None): \"\"\" Encryption method :param text: Text", "alphabet: is not used :type text: string :type key: integer", "__rot13 = Rot13() def __init__(self): alphabet = al.ENGLISH half =", ":param text: Text to decrypt :param key: is not used", "<gh_stars>10-100 #!/usr/bin/python from .rot13 import Rot13 import secretpy.alphabets as al", ":param text: Text to encrypt :param key: is not used", "secretpy.alphabets as al class Rot18: \"\"\" The Rot18 Cipher \"\"\"", "def __init__(self): alphabet = al.ENGLISH half = len(alphabet) >> 1", "used :param alphabet: is not used :type text: string 
:type", "method :param text: Text to decrypt :param key: is not", "used :type text: string :type key: integer :type alphabet: string", "al class Rot18: \"\"\" The Rot18 Cipher \"\"\" __rot13 =", "self.__alphabet = alphabet[:half] + al.DECIMAL[:5] + alphabet[half:] + al.DECIMAL[5:] def", "text :rtype: string \"\"\" return self.__crypt(text, self.__alphabet) def decrypt(self, text,", "return self.__rot13.encrypt(text, alphabet=self.__alphabet) def encrypt(self, text, key=None, alphabet=None): \"\"\" Encryption", "text, alphabet): return self.__rot13.encrypt(text, alphabet=self.__alphabet) def encrypt(self, text, key=None, alphabet=None):", "half = len(alphabet) >> 1 self.__alphabet = alphabet[:half] + al.DECIMAL[:5]", "+ al.DECIMAL[:5] + alphabet[half:] + al.DECIMAL[5:] def __crypt(self, text, alphabet):", "def __crypt(self, text, alphabet): return self.__rot13.encrypt(text, alphabet=self.__alphabet) def encrypt(self, text,", "text: Text to encrypt :param key: is not used :param", "def decrypt(self, text, key=None, alphabet=None): \"\"\" Decryption method :param text:", "__init__(self): alphabet = al.ENGLISH half = len(alphabet) >> 1 self.__alphabet", "key=None, alphabet=None): \"\"\" Encryption method :param text: Text to encrypt", ":rtype: string \"\"\" return self.__crypt(text, self.__alphabet) def get_fixed_alphabet(self): return self.__alphabet", ":param alphabet: is not used :type text: string :type key:", "= Rot13() def __init__(self): alphabet = al.ENGLISH half = len(alphabet)", "al.DECIMAL[5:] def __crypt(self, text, alphabet): return self.__rot13.encrypt(text, alphabet=self.__alphabet) def encrypt(self,", "class Rot18: \"\"\" The Rot18 Cipher \"\"\" __rot13 = Rot13()", "\"\"\" Decryption method :param text: Text to decrypt :param key:", "to encrypt :param key: is not used :param alphabet: is", "key: integer :type alphabet: string :return: text :rtype: string \"\"\"", "from .rot13 import Rot13 import secretpy.alphabets as al class Rot18:", "text, key=None, 
alphabet=None): \"\"\" Decryption method :param text: Text to", "Decryption method :param text: Text to decrypt :param key: is", "to decrypt :param key: is not used :param alphabet: is", "text, key=None, alphabet=None): \"\"\" Encryption method :param text: Text to", "as al class Rot18: \"\"\" The Rot18 Cipher \"\"\" __rot13", "text :rtype: string \"\"\" return self.__crypt(text, self.__alphabet) def get_fixed_alphabet(self): return" ]
[ "install(cls, database): to_save = False for expected in cls.properties: if", "expected ): database.remove_prop_type(expected.name) database.add_prop_type(expected) to_save = True if to_save: database.save()", "def install(cls, database): to_save = False for expected in cls.properties:", "@classmethod def set(cls, video: Video): for prop in cls.properties: video.properties[prop.name]", "() @abstractmethod def get(self, video: Video): raise NotImplementedError() class PropError(SpecialPropType):", "for prop in cls.properties) @classmethod def set(cls, video: Video): for", "()))) class SpecialProperties: properties = [PropError()] @classmethod def install(cls, database):", "in video.properties for prop in cls.properties) @classmethod def set(cls, video:", "def get(self, video: Video): return sorted(set(video.errors) | set(video.properties.get(self.name, ()))) class", "all(prop.name in video.properties for prop in cls.properties) @classmethod def set(cls,", "<reponame>notoraptor/pysaurus from abc import abstractmethod from pysaurus.database.properties import PropType from", "video: Video): raise NotImplementedError() class PropError(SpecialPropType): __slots__ = () def", "Video): raise NotImplementedError() class PropError(SpecialPropType): __slots__ = () def __init__(self):", "in cls.properties) @classmethod def set(cls, video: Video): for prop in", "SpecialPropType(PropType): __slots__ = () @abstractmethod def get(self, video: Video): raise", "__init__(self): super().__init__(\"<error>\", \"\", True) def get(self, video: Video): return sorted(set(video.errors)", "\"\", True) def get(self, video: Video): return sorted(set(video.errors) | set(video.properties.get(self.name,", "): database.remove_prop_type(expected.name) database.add_prop_type(expected) to_save = True if to_save: database.save() @classmethod", "raise NotImplementedError() class PropError(SpecialPropType): __slots__ = () def __init__(self): super().__init__(\"<error>\",", "Video class 
SpecialPropType(PropType): __slots__ = () @abstractmethod def get(self, video:", "return sorted(set(video.errors) | set(video.properties.get(self.name, ()))) class SpecialProperties: properties = [PropError()]", "not database.has_prop_type(expected.name) or database.get_prop_type(expected.name) != expected ): database.remove_prop_type(expected.name) database.add_prop_type(expected) to_save", "NotImplementedError() class PropError(SpecialPropType): __slots__ = () def __init__(self): super().__init__(\"<error>\", \"\",", "= () @abstractmethod def get(self, video: Video): raise NotImplementedError() class", "database.add_prop_type(expected) to_save = True if to_save: database.save() @classmethod def all_in(cls,", "class SpecialProperties: properties = [PropError()] @classmethod def install(cls, database): to_save", "video: Video): return sorted(set(video.errors) | set(video.properties.get(self.name, ()))) class SpecialProperties: properties", "PropError(SpecialPropType): __slots__ = () def __init__(self): super().__init__(\"<error>\", \"\", True) def", "to_save = False for expected in cls.properties: if ( not", "all_in(cls, video: Video): return all(prop.name in video.properties for prop in", "set(cls, video: Video): for prop in cls.properties: video.properties[prop.name] = prop.get(video)", "!= expected ): database.remove_prop_type(expected.name) database.add_prop_type(expected) to_save = True if to_save:", "cls.properties) @classmethod def set(cls, video: Video): for prop in cls.properties:", "__slots__ = () def __init__(self): super().__init__(\"<error>\", \"\", True) def get(self,", "| set(video.properties.get(self.name, ()))) class SpecialProperties: properties = [PropError()] @classmethod def", "import abstractmethod from pysaurus.database.properties import PropType from pysaurus.database.video import Video", "class SpecialPropType(PropType): __slots__ = () @abstractmethod def get(self, video: Video):", "def get(self, video: Video): raise NotImplementedError() 
class PropError(SpecialPropType): __slots__ =", "video.properties for prop in cls.properties) @classmethod def set(cls, video: Video):", "True if to_save: database.save() @classmethod def all_in(cls, video: Video): return", "if to_save: database.save() @classmethod def all_in(cls, video: Video): return all(prop.name", "from pysaurus.database.video import Video class SpecialPropType(PropType): __slots__ = () @abstractmethod", "= True if to_save: database.save() @classmethod def all_in(cls, video: Video):", "Video): return sorted(set(video.errors) | set(video.properties.get(self.name, ()))) class SpecialProperties: properties =", "= False for expected in cls.properties: if ( not database.has_prop_type(expected.name)", "( not database.has_prop_type(expected.name) or database.get_prop_type(expected.name) != expected ): database.remove_prop_type(expected.name) database.add_prop_type(expected)", "import Video class SpecialPropType(PropType): __slots__ = () @abstractmethod def get(self,", "database.remove_prop_type(expected.name) database.add_prop_type(expected) to_save = True if to_save: database.save() @classmethod def", "set(video.properties.get(self.name, ()))) class SpecialProperties: properties = [PropError()] @classmethod def install(cls,", "def all_in(cls, video: Video): return all(prop.name in video.properties for prop", "to_save = True if to_save: database.save() @classmethod def all_in(cls, video:", "prop in cls.properties) @classmethod def set(cls, video: Video): for prop", "@classmethod def install(cls, database): to_save = False for expected in", "for expected in cls.properties: if ( not database.has_prop_type(expected.name) or database.get_prop_type(expected.name)", "() def __init__(self): super().__init__(\"<error>\", \"\", True) def get(self, video: Video):", "@classmethod def all_in(cls, video: Video): return all(prop.name in video.properties for", "database.get_prop_type(expected.name) != expected ): database.remove_prop_type(expected.name) 
database.add_prop_type(expected) to_save = True if", "super().__init__(\"<error>\", \"\", True) def get(self, video: Video): return sorted(set(video.errors) |", "to_save: database.save() @classmethod def all_in(cls, video: Video): return all(prop.name in", "False for expected in cls.properties: if ( not database.has_prop_type(expected.name) or", "get(self, video: Video): return sorted(set(video.errors) | set(video.properties.get(self.name, ()))) class SpecialProperties:", "Video): return all(prop.name in video.properties for prop in cls.properties) @classmethod", "return all(prop.name in video.properties for prop in cls.properties) @classmethod def", "from abc import abstractmethod from pysaurus.database.properties import PropType from pysaurus.database.video", "cls.properties: if ( not database.has_prop_type(expected.name) or database.get_prop_type(expected.name) != expected ):", "= [PropError()] @classmethod def install(cls, database): to_save = False for", "database.has_prop_type(expected.name) or database.get_prop_type(expected.name) != expected ): database.remove_prop_type(expected.name) database.add_prop_type(expected) to_save =", "__slots__ = () @abstractmethod def get(self, video: Video): raise NotImplementedError()", "sorted(set(video.errors) | set(video.properties.get(self.name, ()))) class SpecialProperties: properties = [PropError()] @classmethod", "PropType from pysaurus.database.video import Video class SpecialPropType(PropType): __slots__ = ()", "from pysaurus.database.properties import PropType from pysaurus.database.video import Video class SpecialPropType(PropType):", "or database.get_prop_type(expected.name) != expected ): database.remove_prop_type(expected.name) database.add_prop_type(expected) to_save = True", "abstractmethod from pysaurus.database.properties import PropType from pysaurus.database.video import Video class", "pysaurus.database.properties import PropType from pysaurus.database.video import Video class SpecialPropType(PropType): 
__slots__", "@abstractmethod def get(self, video: Video): raise NotImplementedError() class PropError(SpecialPropType): __slots__", "database): to_save = False for expected in cls.properties: if (", "abc import abstractmethod from pysaurus.database.properties import PropType from pysaurus.database.video import", "get(self, video: Video): raise NotImplementedError() class PropError(SpecialPropType): __slots__ = ()", "def set(cls, video: Video): for prop in cls.properties: video.properties[prop.name] =", "database.save() @classmethod def all_in(cls, video: Video): return all(prop.name in video.properties", "expected in cls.properties: if ( not database.has_prop_type(expected.name) or database.get_prop_type(expected.name) !=", "def __init__(self): super().__init__(\"<error>\", \"\", True) def get(self, video: Video): return", "if ( not database.has_prop_type(expected.name) or database.get_prop_type(expected.name) != expected ): database.remove_prop_type(expected.name)", "in cls.properties: if ( not database.has_prop_type(expected.name) or database.get_prop_type(expected.name) != expected", "= () def __init__(self): super().__init__(\"<error>\", \"\", True) def get(self, video:", "import PropType from pysaurus.database.video import Video class SpecialPropType(PropType): __slots__ =", "properties = [PropError()] @classmethod def install(cls, database): to_save = False", "SpecialProperties: properties = [PropError()] @classmethod def install(cls, database): to_save =", "video: Video): return all(prop.name in video.properties for prop in cls.properties)", "True) def get(self, video: Video): return sorted(set(video.errors) | set(video.properties.get(self.name, ())))", "[PropError()] @classmethod def install(cls, database): to_save = False for expected", "pysaurus.database.video import Video class SpecialPropType(PropType): __slots__ = () @abstractmethod def", "class PropError(SpecialPropType): __slots__ = () def __init__(self): super().__init__(\"<error>\", \"\", True)" ]
[ "implied_roles = self.__class__._role_inferences_mapping.get( role_id, set()) role_names = {self.__class__._role_map[rid] for rid", "test runtime in the case where # ``[identity] admin_role`` ==", "in cls._role_map.items()}) @classmethod def _create_user_role_on_project(cls, role_ids): for role_id in role_ids:", "admin_resource_id=admin_resource_id) as ctx: # the list of resources available for", "not assigned. \"\"\" if self.resources is None: raise rbac_exceptions.RbacValidateListException( reason=\"ctx.resources", "in rule['implies']} res[prior_role] = implies return res raw_data = cls.admin_roles_client.list_all_role_inference_rules()", "oslo_utils import excutils from tempest import config from tempest.lib import", "the current test role equals the admin role. :returns: True", "as lib_exc from patrole_tempest_plugin import rbac_exceptions CONF = config.CONF LOG", "defined by ``CONF.identity.admin_role`` and ``CONF.patrole.rbac_test_roles``. \"\"\" credentials = ['primary', 'admin']", "self._admin_len: raise rbac_exceptions.RbacValidateListException( reason=\"the list of admin resources cannot be", "super(RbacUtilsMixin, cls).setup_clients() @classmethod def _prepare_role_inferences_mapping(cls): \"\"\"Preparing roles mapping to support", "@contextlib.contextmanager def override_role(self): \"\"\"Override the role used by ``os_primary`` Tempest", "from tempest import config from tempest.lib import exceptions as lib_exc", "# \"Member\" role, then we must delete the \"admin\" role.", "cls.hosts_client = cls.os_primary.hosts_client ... This class is responsible for overriding", "\"\"\"Verifies whether the current test role equals the admin role.", "is removed if CONF.patrole.rbac_test_role: if not roles: roles.append(CONF.patrole.rbac_test_role) for role_name", "in the test is not executed. 
\"\"\" self._set_override_role_called() self._override_role(True) try:", "doing so, it is possible to seamlessly swap between admin", "implies = {r['id'] for r in rule['implies']} res[prior_role] = implies", "\" \"RBAC testing.\") if not admin_role_id: missing_roles.append(CONF.identity.admin_role) if not all(rbac_role_ids):", "under the License. import contextlib import sys import time from", "= self.__class__._role_map.get(role) implied_roles = self.__class__._role_inferences_mapping.get( role_id, set()) role_names = {self.__class__._role_map[rid]", "is not None and not admin_resource_id: self._admin_len = len(admin_resources) if", "Example:: @rbac_rule_validation.action(service='test', rules=['a:test:rule']) def test_foo(self): # Allocate test-level resources here.", "to ensure we are # passing the second boundary before", "role overriding only supports v3 identity API.\") cls.admin_roles_client = admin_roles_client", "in compliance with the License. You may obtain # a", "(i.e. ``os_primary`` role). By doing so, it is possible to", "not roles_already_present: cls._create_user_role_on_project(target_roles) except Exception as exp: with excutils.save_and_reraise_exception(): LOG.exception(exp)", "import contextlib import sys import time from oslo_log import log", "runtime in the case where # ``[identity] admin_role`` == ``[patrole]", "\"reader\"}], \"prior_role\": {\"id\": \"2\", \"name\": \"member\"} }, { \"implies\": [{\"id\":", "third party client auth_providers. 
\"\"\" return [cls.os_primary.auth_provider] def _set_override_role_called(self): \"\"\"Helper", "removed if CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role) roles = list(set(roles)) # TODO(felipemonteiro): Make", "+= \" Available roles: %s.\" % \", \".join(cls._role_map) raise rbac_exceptions.RbacResourceSetupFailed(msg)", "= cls.admin_roles_client.list_roles()['roles'] cls._role_map = {r['name']: r['id'] for r in available_roles}", "role in res.copy(): role_id = self.__class__._role_map.get(role) implied_roles = self.__class__._role_inferences_mapping.get( role_id,", "drop once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: if not roles:", "admin_resources is not None and not admin_resource_id: self._admin_len = len(admin_resources)", "class. Child classes should not use this mixin. Example:: class", "admin_role_id = cls._role_map.get(CONF.identity.admin_role) if not all([admin_role_id, all(rbac_role_ids)]): missing_roles = []", "for test clean # up. self._override_role(False) @classmethod def _override_role(cls, toggle_rbac_role=False):", "= cls.os_primary.hosts_client ... This class is responsible for overriding the", "_project_id = None _user_id = None _role_map = None _role_inferences_mapping", "the ``override_role_and_validate_list`` function. To validate will be used the ``_validate_resource``", "# Do not override roles if `target_role` already exists. if", "cls._role_map.update({v: k for k, v in cls._role_map.items()}) @classmethod def _create_user_role_on_project(cls,", "admin_role_id cls._rbac_role_ids = rbac_role_ids # Adding backward mapping cls._role_map.update({v: k", "cls.addClassResourceCleanup(cls.restore_roles) # Change default role to admin cls._override_role(False) super(RbacUtilsMixin, cls).setup_clients()", "user's roles on the project are an exact match. if", "data.get(role_id, set()) for rid in roles.copy(): roles.update(process_roles(rid, data)) return roles", "this function. 
Example:: @rbac_rule_validation.action(service='test', rules=['a:test:rule']) def test_foo(self): # Allocate test-level", "and limitations # under the License. import contextlib import sys", "sequential tests. \"\"\" was_called = self.__override_role_called self.__override_role_called = False return", "call which does policy enforcement. The primary credentials always cycle", "restore_roles(cls): if cls._orig_roles: LOG.info(\"Restoring original roles %s\", cls._orig_roles) roles_already_present =", "Reserved. # # Licensed under the Apache License, Version 2.0", "resources received before calling the ``override_role_and_validate_list`` function. To validate will", "of :py:class:`tempest.test.BaseTestCase` to perform Patrole class setup for a base", "*args, **kwargs): super(RbacUtilsMixin, self).__init__(*args, **kwargs) # Shows if override_role was", "self.resources = None if admin_resources is not None and not", "keystone API returns all inference rules, which makes it possible", "in role_ids: cls.admin_roles_client.create_user_role_on_project( cls._project_id, cls._user_id, role_id) @classmethod def _list_and_clear_user_roles_on_project(cls, role_ids):", "True: role is set to ``[patrole] rbac_test_role`` * If False:", "is underway to safely clean up after this function. Example::", "for a base RBAC class. Child classes should not use", "\"2\", \"name\": \"member\"} }, { \"implies\": [{\"id\": \"2\", \"name\": \"member\"}],", "if not self._admin_len: raise rbac_exceptions.RbacValidateListException( reason=\"the list of admin resources", "user's original roles and rollback after testing. roles = cls.admin_roles_client.list_user_roles_on_project(", "``CONF.patrole.rbac_test_roles``. \"\"\" credentials = ['primary', 'admin'] def __init__(self, *args, **kwargs):", "if ``rbac_test_roles`` contain the admin role. \"\"\" roles = CONF.patrole.rbac_test_roles", "cls._admin_role_id = admin_role_id cls._rbac_role_ids = rbac_role_ids # Adding backward mapping", "authenticate. 
# Only sleep if a token revocation occurred as", "required for \" \"RBAC testing.\") if not admin_role_id: missing_roles.append(CONF.identity.admin_role) if", "API.\") cls.admin_roles_client = admin_roles_client cls._project_id = cls.os_primary.credentials.tenant_id cls._user_id = cls.os_primary.credentials.user_id", "role # overriding. This will optimize test runtime in the", "identity API.\") cls.admin_roles_client = admin_roles_client cls._project_id = cls.os_primary.credentials.tenant_id cls._user_id =", "of resources received before calling the ``override_role_and_validate_list`` function. To validate", "``tempest.test.BaseTestCase``. :param list admin_resources: The list of resources received before", "None _user_id = None _role_map = None _role_inferences_mapping = None", "False try: target_roles = (cls._rbac_role_ids if toggle_rbac_role else [cls._admin_role_id]) roles_already_present", "validate RBAC for a list API action. List actions usually", "= [] admin_roles_client = None @classmethod def restore_roles(cls): if cls._orig_roles:", "process_roles(role_id, data): roles = data.get(role_id, set()) for rid in roles.copy():", "\"1\", \"name\": \"admin\"} } ] } and converts it to", "\"\"\" self.resources = None if admin_resources is not None and", "= cls._list_and_clear_user_roles_on_project( target_roles) # Do not override roles if `target_role`", "all(rbac_role_ids)]): missing_roles = [] msg = (\"Could not find `[patrole]", "\"\"\" caught_exception = self.__override_role_caught_exc self.__override_role_caught_exc = False return caught_exception def", "if not all(rbac_role_ids): missing_roles += [role_name for role_name in roles", "admin_resource_id and admin_resources is None: self._admin_resource_id = admin_resource_id self._validate_func =", "block is always executed, no matter the result of the", "rules, which makes it possible to prepare roles mapping. It", "is not assigned. 
\"\"\" if self.resources is None: raise rbac_exceptions.RbacValidateListException(", "``admin_resource_id`` should be used, not both. :param list admin_resources: The", "_role_inferences_mapping = None _orig_roles = [] admin_roles_client = None @classmethod", "\"3\", \"name\": \"reader\"}], \"prior_role\": {\"id\": \"2\", \"name\": \"member\"} }, {", "classes should not use this mixin. Example:: class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest):", "override roles if `target_role` already exists. if not roles_already_present: cls._create_user_role_on_project(target_roles)", "\"\"\" was_called = self.__override_role_called self.__override_role_called = False return was_called def", "}, { \"implies\": [{\"id\": \"2\", \"name\": \"member\"}], \"prior_role\": {\"id\": \"1\",", "execution. :returns: None .. warning:: This function can alter user", "cls._project_id, cls._user_id, role_id) @classmethod def _list_and_clear_user_roles_on_project(cls, role_ids): roles = cls.admin_roles_client.list_user_roles_on_project(", "returns all inference rules, which makes it possible to prepare", "after testing. roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] cls._orig_roles = [role['id']", "enforces the # expected policy specified by \"rule\" in the", "'admin'] def __init__(self, *args, **kwargs): super(RbacUtilsMixin, self).__init__(*args, **kwargs) # Shows", "admin_resource_id=None): \"\"\"Constructor for ``ValidateListContext``. 
Either ``admin_resources`` or ``admin_resource_id`` should be", "self._set_override_role_caught_exc() # This code block is always executed, no matter", "to in writing, software # distributed under the License is", "an exception, any code below this # point in the", "change the role used by ``os_primary`` credentials to: * ``[patrole]", "# the resource created by admin admin_resource_id = ( self.ntp_client.create_dscp_marking_rule()", "resource created by admin admin_resource_id = ( self.ntp_client.create_dscp_marking_rule() [\"dscp_marking_rule\"][\"id']) with", "or agreed to in writing, software # distributed under the", "* If False: role is set to ``[identity] admin_role`` \"\"\"", "\"exclusive\") def _validate_len(self): \"\"\"Validates that the number of resources is", "self._override_role(True) try: # Execute the test. yield finally: # Check", "value to False for sequential tests. \"\"\" was_called = self.__override_role_called", "delete the \"admin\" role. Thus, we only # return early", "Apache License, Version 2.0 (the \"License\"); you may # not", "def _validate(self): \"\"\"Calls the proper validation function. :raises RbacValidateListException: if", "All Rights Reserved. # # Licensed under the Apache License,", "time from oslo_log import log as logging from oslo_utils import", "all_role_ids`` here # to avoid over-permission errors: if the current", "aware so sleep to ensure we are # passing the", "created by admin admin_resource_id = ( self.ntp_client.create_dscp_marking_rule() [\"dscp_marking_rule\"][\"id']) with self.override_role_and_validate_list(", "the License. import contextlib import sys import time from oslo_log", "License, Version 2.0 (the \"License\"); you may # not use", "the value of the primary Tempest credential's role (i.e. ``os_primary``", "raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self): \"\"\"Calls the proper validation function. 
:raises", "list of resources available for member role ctx.resources = self.ntp_client.list_dscp_marking_rules(", "this more robust via a context is admin # lookup.", "def process_roles(role_id, data): roles = data.get(role_id, set()) for rid in", "``[identity] admin_role`` after test execution Automatically switches to admin role", "default role of ``os_primary`` credentials. * If True: role is", "not use this file except in compliance with the License.", "self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)[\"dscp_marking_rules\"] \"\"\" ctx = _ValidateListContext(admin_resources, admin_resource_id) with self.override_role(): yield", "cls._project_id, cls._user_id)['roles'] all_role_ids = [role['id'] for role in roles] #", "\"\"\"Constructor for ``ValidateListContext``. Either ``admin_resources`` or ``admin_resource_id`` should be used,", "as a mixin class alongside an instance of :py:class:`tempest.test.BaseTestCase` to", "@classmethod def get_auth_providers(cls): \"\"\"Returns list of auth_providers used within test.", "outside (which is invalid). 
\"\"\" caught_exception = self.__override_role_caught_exc self.__override_role_caught_exc =", "= list_function() \"\"\" def __init__(self, admin_resources=None, admin_resource_id=None): \"\"\"Constructor for ``ValidateListContext``.", "= list(set(roles)) # TODO(felipemonteiro): Make this more robust via a", "== set(all_role_ids): return True for role in roles: cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id,", "resources available for member role ctx.resources = self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)[\"dscp_marking_rules\"] \"\"\"", "by process of elimination, it can be determined whether one", "return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self): \"\"\"Calls the proper validation function.", "primary credentials always cycle between roles defined by ``CONF.identity.admin_role`` and", "specified by \"rule\" in the decorator. self.foo_service.bar_api_call() # The role", "or third party client auth_providers. \"\"\" return [cls.os_primary.auth_provider] def _set_override_role_called(self):", "not self._admin_len: raise rbac_exceptions.RbacValidateListException( reason=\"the list of admin resources cannot", "None _project_id = None _user_id = None _role_map = None", "[\"admin\"] >> [\"admin\", \"member\", \"reader\"] [\"member\"] >> [\"member\", \"reader\"] [\"reader\"]", "mapping Examples:: [\"admin\"] >> [\"admin\", \"member\", \"reader\"] [\"member\"] >> [\"member\",", "roles %s\", cls._orig_roles) roles_already_present = cls._list_and_clear_user_roles_on_project( cls._orig_roles) if not roles_already_present:", "of roles on the # project includes \"admin\" and \"Member\",", "rbac_test_roles``. 
if not roles_already_present: time.sleep(1) for provider in auth_providers: provider.set_auth()", "# if the API call above threw an exception, any", "roles: list of roles :return: extended list of roles \"\"\"", "converts it to the mapping:: { \"2\": [\"3\"], # \"member\":", "tempest import config from tempest.lib import exceptions as lib_exc from", "_list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules \"\"\" # noqa: E501 def process_roles(role_id, data): roles", "should not use this mixin. Example:: class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest): @classmethod", "from oslo_utils import excutils from tempest import config from tempest.lib", "called.\"\"\" self.__override_role_called = True def _set_override_role_caught_exc(self): \"\"\"Helper for tracking whether", "Copyright 2017 AT&T Corporation. # All Rights Reserved. # #", "License is distributed on an \"AS IS\" BASIS, WITHOUT #", "List actions usually do soft authorization: partial or empty response", "and admin_resource_id are mutually \" \"exclusive\") def _validate_len(self): \"\"\"Validates that", "it can be determined whether one was thrown outside (which", "tracking whether exception was thrown inside ``override_role``. \"\"\" self.__override_role_caught_exc =", "\"License\"); you may # not use this file except in", "policy_id=self.policy_id)[\"dscp_marking_rules\"] \"\"\" ctx = _ValidateListContext(admin_resources, admin_resource_id) with self.override_role(): yield ctx", "(which is invalid). 
\"\"\" caught_exception = self.__override_role_caught_exc self.__override_role_caught_exc = False", "used as a mixin class alongside an instance of :py:class:`tempest.test.BaseTestCase`", "\"\"\"Validates that the admin resource is present in the resources.", "of admin resources cannot be empty\") self._validate_func = self._validate_len elif", "logging.getLogger(__name__) class _ValidateListContext(object): \"\"\"Context class responsible for validation of the", "} and converts it to the mapping:: { \"2\": [\"3\"],", "ctx: ctx.resources = list_function() \"\"\" def __init__(self, admin_resources=None, admin_resource_id=None): \"\"\"Constructor", "policy enforcement. The primary credentials always cycle between roles defined", "roles: rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id = cls._role_map.get(CONF.identity.admin_role) if not all([admin_role_id, all(rbac_role_ids)]): missing_roles", "set(role_ids) == set(all_role_ids): return True for role in roles: cls.admin_roles_client.delete_role_from_user_on_project(", "[role['id'] for role in roles] # NOTE(felipemonteiro): We do not", "Fernet tokens are not subsecond aware so sleep to ensure", "function can alter user roles for pre-provisioned credentials. Work is", "threw an exception, any code below this # point in", "\"prior_role\": {\"id\": \"2\", \"name\": \"member\"} }, { \"implies\": [{\"id\": \"2\",", "no matter the result of the # test. Automatically switch", "= {r['id'] for r in rule['implies']} res[prior_role] = implies return", "is removed if CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role) roles = list(set(roles)) # TODO(felipemonteiro):", "``ctx.resources`` variable. Example:: with self.override_role_and_validate_list(...) 
as ctx: ctx.resources = list_function()", "for member role ctx.resources = self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)[\"dscp_marking_rules\"] \"\"\" ctx =", "if CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role) roles = list(set(roles)) # TODO(felipemonteiro): Make this", "robust via a context is admin # lookup. return CONF.identity.admin_role", "setup and clean up, and primary credentials, needed to perform", "= [role['id'] for role in roles] # NOTE(felipemonteiro): We do", "validate that ``override_role`` is called and reset its value to", "resource created before calling the ``override_role_and_validate_list`` function. To validate will", "= (\"Could not find `[patrole] rbac_test_roles` or \" \"`[identity] admin_role`,", "can be determined whether one was thrown outside (which is", "that the admin resource is present in the resources. \"\"\"", "\"\"\"Idempotently validate that exception was caught inside ``override_role``, so that,", "in res.copy(): role_id = self.__class__._role_map.get(role) implied_roles = self.__class__._role_inferences_mapping.get( role_id, set())", "the \"admin\" role. Thus, we only # return early if", "created before calling the ``override_role_and_validate_list`` function. To validate will be", "E501 def process_roles(role_id, data): roles = data.get(role_id, set()) for rid", "role to admin cls._override_role(False) super(RbacUtilsMixin, cls).setup_clients() @classmethod def _prepare_role_inferences_mapping(cls): \"\"\"Preparing", "has now been overridden. 
Within # this block, call the", "\", \".join(cls._role_map) raise rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id = admin_role_id cls._rbac_role_ids = rbac_role_ids", "by ``os_primary`` credentials to: * ``[patrole] rbac_test_roles`` before test execution", "None if admin_resources is not None and not admin_resource_id: self._admin_len", "return [cls.os_primary.auth_provider] def _set_override_role_called(self): \"\"\"Helper for tracking whether ``override_role`` was", "drop once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role) roles =", "* ``[patrole] rbac_test_roles`` before test execution * ``[identity] admin_role`` after", "# Copyright 2017 AT&T Corporation. # All Rights Reserved. #", "roles mapping. It walks recursively through the raw data:: {\"role_inferences\":", "up, and primary credentials, needed to perform the API call", "= logging.getLogger(__name__) class _ValidateListContext(object): \"\"\"Context class responsible for validation of", "[\"reader\"] [\"custom_role\"] >> [\"custom_role\"] :param roles: list of roles :return:", "includes \"admin\" and \"Member\", and we are switching to the", "list API actions. :param test_obj: Instance of ``tempest.test.BaseTestCase``. :param list", "responsible for switching ``os_primary`` role. Should be used as a", "as exp: with excutils.save_and_reraise_exception(): LOG.exception(exp) finally: auth_providers = cls.get_auth_providers() for", "with roles from mapping Examples:: [\"admin\"] >> [\"admin\", \"member\", \"reader\"]", "to include their own or third party client auth_providers. \"\"\"", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "overrides default role of ``os_primary`` credentials. * If True: role", "found: %s.\" % ( \", \".join(missing_roles)) msg += \" Available", "exists. 
if not roles_already_present: cls._create_user_role_on_project(target_roles) except Exception as exp: with", "set()) for rid in roles.copy(): roles.update(process_roles(rid, data)) return roles def", "= False return was_called def _validate_override_role_caught_exc(self): \"\"\"Idempotently validate that exception", "\"\"\"Extending given roles with roles from mapping Examples:: [\"admin\"] >>", "To validate will be used the ``_validate_resource`` function. :raises RbacValidateListException:", "= implies return res raw_data = cls.admin_roles_client.list_all_role_inference_rules() data = convert_data(raw_data['role_inferences'])", "roles.copy(): roles.update(process_roles(rid, data)) return roles def convert_data(data): res = {}", "of ``tempest.test.BaseTestCase``. :param list admin_resources: The list of resources received", "cls._override_role(False) super(RbacUtilsMixin, cls).setup_clients() @classmethod def _prepare_role_inferences_mapping(cls): \"\"\"Preparing roles mapping to", "Following roles were not found: %s.\" % ( \", \".join(missing_roles))", "not roles: roles.append(CONF.patrole.rbac_test_role) for role_name in roles: rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id =", "admin_resources=None, admin_resource_id=None): \"\"\"Call ``override_role`` and validate RBAC for a list", "Version 2.0 (the \"License\"); you may # not use this", "actions. :param test_obj: Instance of ``tempest.test.BaseTestCase``. :param list admin_resources: The", "to ``[identity] admin_role`` \"\"\" LOG.debug('Overriding role to: %s.', toggle_rbac_role) roles_already_present", "{ \"implies\": [{\"id\": \"3\", \"name\": \"reader\"}], \"prior_role\": {\"id\": \"2\", \"name\":", "try: # Execute the test. yield finally: # Check whether", "if the ``ctx.resources`` variable is not assigned. \"\"\" if self.resources", "admin role. :returns: True if ``rbac_test_roles`` contain the admin role.", "# Allocate test-level resources here. 
with self.override_role(): # The role", "Boolean value that controls the role that overrides default role", "roles.append(CONF.patrole.rbac_test_role) roles = list(set(roles)) # TODO(felipemonteiro): Make this more robust", ":returns: None .. warning:: This function can alter user roles", "# NOTE(felipemonteiro): We do not use ``role_id in all_role_ids`` here", "code below this # point in the test is not", "def override_role_and_validate_list(self, admin_resources=None, admin_resource_id=None): \"\"\"Call ``override_role`` and validate RBAC for", "{\"id\": \"2\", \"name\": \"member\"} }, { \"implies\": [{\"id\": \"2\", \"name\":", "for r in rule['implies']} res[prior_role] = implies return res raw_data", "def _validate_len(self): \"\"\"Validates that the number of resources is less", "get_auth_providers(cls): \"\"\"Returns list of auth_providers used within test. Tests may", "\"\"\" return [cls.os_primary.auth_provider] def _set_override_role_called(self): \"\"\"Helper for tracking whether ``override_role``", "here. with self.override_role(): # The role for `os_primary` has now", "admin role after test execution. :returns: None .. warning:: This", "cls.os_primary.hosts_client ... This class is responsible for overriding the value", "@rbac_rule_validation.action(service='test', rules=['a:test:rule']) def test_foo(self): # Allocate test-level resources here. with", "revocation occurred as a result of role # overriding. This", "\"\"\"Context class responsible for validation of the list functions. 
This", "_list_and_clear_user_roles_on_project(cls, role_ids): roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] all_role_ids = [role['id']", "It walks recursively through the raw data:: {\"role_inferences\": [ {", "{ \"2\": [\"3\"], # \"member\": [\"reader\"], \"1\": [\"2\", \"3\"] #", "self._validate_resource else: raise rbac_exceptions.RbacValidateListException( reason=\"admin_resources and admin_resource_id are mutually \"", "functions. This class is used in ``override_role_and_validate_list`` function and the", "partial or empty response bodies are returned instead of exceptions.", "compliance with the License. You may obtain # a copy", "def test_foo(self): # Allocate test-level resources here. with self.override_role(): #", "raise rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len > len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self):", "role_id = self.__class__._role_map.get(role) implied_roles = self.__class__._role_inferences_mapping.get( role_id, set()) role_names =", "LOG.info(\"Restoring original roles %s\", cls._orig_roles) roles_already_present = cls._list_and_clear_user_roles_on_project( cls._orig_roles) if", "received before calling the ``override_role_and_validate_list`` function. To validate will be", "process of elimination, it can be determined whether one was", "variable is not assigned. \"\"\" if self.resources is None: raise", "super(RbacUtilsMixin, self).__init__(*args, **kwargs) # Shows if override_role was called. self.__override_role_called", "from oslo_log import log as logging from oslo_utils import excutils", "permissions and limitations # under the License. 
import contextlib import", "is not None: self._set_override_role_caught_exc() # This code block is always", "+= \" Following roles were not found: %s.\" % (", "# # Unless required by applicable law or agreed to", "cls._project_id = cls.os_primary.credentials.tenant_id cls._user_id = cls.os_primary.credentials.user_id cls._role_inferences_mapping = cls._prepare_role_inferences_mapping() cls._init_roles()", "raise rbac_exceptions.RbacValidateListException( reason=\"admin_resources and admin_resource_id are mutually \" \"exclusive\") def", "extended list of roles \"\"\" res = set(r for r", "response bodies are returned instead of exceptions. This helper validates", "``ValidateListContext``. Either ``admin_resources`` or ``admin_resource_id`` should be used, not both.", "\"2\": [\"3\"], # \"member\": [\"reader\"], \"1\": [\"2\", \"3\"] # \"admin\":", "was called.\"\"\" self.__override_role_called = True def _set_override_role_caught_exc(self): \"\"\"Helper for tracking", "testing.\") if not admin_role_id: missing_roles.append(CONF.identity.admin_role) if not all(rbac_role_ids): missing_roles +=", "and the result of a list function must be assigned", "``_validate_len`` function. :param UUID admin_resource_id: An ID of a resource", "created before calling the ``override_role_and_validate_list`` function. :return: py:class:`_ValidateListContext` object. Example::", "} ] } and converts it to the mapping:: {", "\"admin\" and \"Member\", and we are switching to the #", "def __init__(self, admin_resources=None, admin_resource_id=None): \"\"\"Constructor for ``ValidateListContext``. Either ``admin_resources`` or", "for switching ``os_primary`` role. Should be used as a mixin", "is used in ``override_role_and_validate_list`` function and the result of a", "for rid in implied_roles} res.update(role_names) LOG.debug('All needed roles: %s; Base", "the case where # ``[identity] admin_role`` == ``[patrole] rbac_test_roles``. 
if", "rbac_test_roles` or \" \"`[identity] admin_role`, both of which are required", "the # \"Member\" role, then we must delete the \"admin\"", "{\"id\": \"1\", \"name\": \"admin\"} } ] } and converts it", "\"RBAC testing.\") if not admin_role_id: missing_roles.append(CONF.identity.admin_role) if not all(rbac_role_ids): missing_roles", "admin cls._override_role(False) super(RbacUtilsMixin, cls).setup_clients() @classmethod def _prepare_role_inferences_mapping(cls): \"\"\"Preparing roles mapping", "len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len > len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def", "the resource created by admin admin_resource_id = ( self.ntp_client.create_dscp_marking_rule() [\"dscp_marking_rule\"][\"id'])", "reason=\"admin_resources and admin_resource_id are mutually \" \"exclusive\") def _validate_len(self): \"\"\"Validates", "of the available resources. Should only be used for validating", "exc is not None: self._set_override_role_caught_exc() # This code block is", "_validate_override_role_called(self): \"\"\"Idempotently validate that ``override_role`` is called and reset its", "\"name\": \"member\"} }, { \"implies\": [{\"id\": \"2\", \"name\": \"member\"}], \"prior_role\":", "cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] cls._orig_roles = [role['id'] for role in roles]", "cls._role_map = {r['name']: r['id'] for r in available_roles} LOG.debug('Available roles:", "def convert_data(data): res = {} for rule in data: prior_role", "remember that # for future validation. 
exc = sys.exc_info()[0] if", "roles] cls.addClassResourceCleanup(cls.restore_roles) # Change default role to admin cls._override_role(False) super(RbacUtilsMixin,", "cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id, cls._user_id, role['id']) return False @contextlib.contextmanager def override_role_and_validate_list(self, admin_resources=None,", "instance of :py:class:`tempest.test.BaseTestCase` to perform Patrole class setup for a", "if `target_role` already exists. if not roles_already_present: cls._create_user_role_on_project(target_roles) except Exception", "%s.\" % ( \", \".join(missing_roles)) msg += \" Available roles:", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "%s', cls._role_map.keys()) rbac_role_ids = [] roles = CONF.patrole.rbac_test_roles # TODO(vegasq)", "finally: auth_providers = cls.get_auth_providers() for provider in auth_providers: provider.clear_auth() #", "to perform the API call which does policy enforcement. The", "[\"custom_role\"] :param roles: list of roles :return: extended list of", "self.foo_service.bar_api_call() # The role is switched back to admin automatically.", "not use ``role_id in all_role_ids`` here # to avoid over-permission", "case where # ``[identity] admin_role`` == ``[patrole] rbac_test_roles``. if not", "cls._role_map.keys()) rbac_role_ids = [] roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop", "are an exact match. if set(role_ids) == set(all_role_ids): return True", "cls).setup_clients() @classmethod def _prepare_role_inferences_mapping(cls): \"\"\"Preparing roles mapping to support role", "this # point in the test is not executed. 
\"\"\"", "raw data:: {\"role_inferences\": [ { \"implies\": [{\"id\": \"3\", \"name\": \"reader\"}],", "cls._user_id, role_id) @classmethod def _list_and_clear_user_roles_on_project(cls, role_ids): roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id,", "may obtain # a copy of the License at #", "time.sleep(1) for provider in auth_providers: provider.set_auth() @classmethod def _init_roles(cls): available_roles", "Unless required by applicable law or agreed to in writing,", "cls.admin_roles_client.list_roles()['roles'] cls._role_map = {r['name']: r['id'] for r in available_roles} LOG.debug('Available", "instead of exceptions. This helper validates that unauthorized roles only", "available for member role ctx.resources = self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)[\"dscp_marking_rules\"] \"\"\" ctx", "needed for setup and clean up, and primary credentials, needed", "\"\"\" if self.resources is None: raise rbac_exceptions.RbacValidateListException( reason=\"ctx.resources is not", "for role in roles] cls.addClassResourceCleanup(cls.restore_roles) # Change default role to", "AT&T Corporation. # All Rights Reserved. # # Licensed under", "a base RBAC class. Child classes should not use this", "_init_roles(cls): available_roles = cls.admin_roles_client.list_roles()['roles'] cls._role_map = {r['name']: r['id'] for r", "the result of the # test. Automatically switch back to", "self.__override_role_called = True def _set_override_role_caught_exc(self): \"\"\"Helper for tracking whether exception", "ID of a resource created before calling the ``override_role_and_validate_list`` function.", "future validation. exc = sys.exc_info()[0] if exc is not None:", "\"rule\" in the decorator. 
self.foo_service.bar_api_call() # The role is switched", "``override_role`` is called and reset its value to False for", "TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: if not", "the ``ctx.resources`` variable. Example:: with self.override_role_and_validate_list(...) as ctx: ctx.resources =", "excutils from tempest import config from tempest.lib import exceptions as", "the number of resources is less than admin resources. \"\"\"", "inferences Making query to `list-all-role-inference-rules`_ keystone API returns all inference", "cls._role_inferences_mapping = cls._prepare_role_inferences_mapping() cls._init_roles() # Store the user's original roles", "not use this mixin. Example:: class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest): @classmethod def", "``os_primary`` role. Should be used as a mixin class alongside", "possible to prepare roles mapping. It walks recursively through the", "``os_primary`` Tempest credentials. Temporarily change the role used by ``os_primary``", "\"\"\"Calls the proper validation function. :raises RbacValidateListException: if the ``ctx.resources``", "None @classmethod def restore_roles(cls): if cls._orig_roles: LOG.info(\"Restoring original roles %s\",", "it is possible to seamlessly swap between admin credentials, needed", "cls._orig_roles: LOG.info(\"Restoring original roles %s\", cls._orig_roles) roles_already_present = cls._list_and_clear_user_roles_on_project( cls._orig_roles)", "this method to include their own or third party client", "roles] # NOTE(felipemonteiro): We do not use ``role_id in all_role_ids``", "before calling the ``override_role_and_validate_list`` function. 
To validate will be used", "[\"admin\", \"member\", \"reader\"] [\"member\"] >> [\"member\", \"reader\"] [\"reader\"] >> [\"reader\"]", "if CONF.patrole.rbac_test_role: if not roles: roles.append(CONF.patrole.rbac_test_role) for role_name in roles:", "ctx._validate() @classmethod def get_auth_providers(cls): \"\"\"Returns list of auth_providers used within", ":raises RbacValidateListException: if both ``admin_resources`` and ``admin_resource_id`` are set or", "the admin resource is present in the resources. \"\"\" for", "either express or implied. See the # License for the", "test. Tests may redefine this method to include their own", "roles: roles.append(CONF.patrole.rbac_test_role) for role_name in roles: rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id = cls._role_map.get(CONF.identity.admin_role)", "= cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] cls._orig_roles = [role['id'] for role in", "may # not use this file except in compliance with", "function and the result of a list function must be", "# the list of resources available for member role ctx.resources", "(cls._rbac_role_ids if toggle_rbac_role else [cls._admin_role_id]) roles_already_present = cls._list_and_clear_user_roles_on_project( target_roles) #", "which are required for \" \"RBAC testing.\") if not admin_role_id:", "self.ntp_client.create_dscp_marking_rule() [\"dscp_marking_rule\"][\"id']) with self.override_role_and_validate_list( admin_resource_id=admin_resource_id) as ctx: # the list", "execution Automatically switches to admin role after test execution. :returns:", "actions usually do soft authorization: partial or empty response bodies", "in data: prior_role = rule['prior_role']['id'] implies = {r['id'] for r", "role_ids): for role_id in role_ids: cls.admin_roles_client.create_user_role_on_project( cls._project_id, cls._user_id, role_id) @classmethod", "via a context is admin # lookup. 
return CONF.identity.admin_role in", "auth_providers used within test. Tests may redefine this method to", "now been overridden. Within # this block, call the API", "None _orig_roles = [] admin_roles_client = None @classmethod def restore_roles(cls):", "else [cls._admin_role_id]) roles_already_present = cls._list_and_clear_user_roles_on_project( target_roles) # Do not override", "of elimination, it can be determined whether one was thrown", "call the API endpoint that enforces the # expected policy", "if set(role_ids) == set(all_role_ids): return True for role in roles:", "rbac_test_roles`` before test execution * ``[identity] admin_role`` after test execution", "exp: with excutils.save_and_reraise_exception(): LOG.exception(exp) finally: auth_providers = cls.get_auth_providers() for provider", "self.__override_role_caught_exc = False _admin_role_id = None _rbac_role_ids = None _project_id", "in all_role_ids`` here # to avoid over-permission errors: if the", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "if both ``admin_resources`` and ``admin_resource_id`` are set or unset. \"\"\"", "= False return caught_exception def is_admin(): \"\"\"Verifies whether the current", "role_ids: cls.admin_roles_client.create_user_role_on_project( cls._project_id, cls._user_id, role_id) @classmethod def _list_and_clear_user_roles_on_project(cls, role_ids): roles", "res[role_id] = process_roles(role_id, data) return res def get_all_needed_roles(self, roles): \"\"\"Extending", "credentials. Temporarily change the role used by ``os_primary`` credentials to:", "list of resources received before calling the ``override_role_and_validate_list`` function. :param", "def get_all_needed_roles(self, roles): \"\"\"Extending given roles with roles from mapping", "# expected policy specified by \"rule\" in the decorator. self.foo_service.bar_api_call()", "= cls.get_auth_providers() for provider in auth_providers: provider.clear_auth() # Fernet tokens", "is less than admin resources. 
\"\"\" if not len(self.resources): raise", "sys import time from oslo_log import log as logging from", "[{\"id\": \"2\", \"name\": \"member\"}], \"prior_role\": {\"id\": \"1\", \"name\": \"admin\"} }", "used within test. Tests may redefine this method to include", "available_roles} LOG.debug('Available roles: %s', cls._role_map.keys()) rbac_role_ids = [] roles =", "import exceptions as lib_exc from patrole_tempest_plugin import rbac_exceptions CONF =", "rbac_test_role`` * If False: role is set to ``[identity] admin_role``", "was thrown outside (which is invalid). \"\"\" caught_exception = self.__override_role_caught_exc", "overriding only supports v3 identity API.\") cls.admin_roles_client = admin_roles_client cls._project_id", "True def _validate_override_role_called(self): \"\"\"Idempotently validate that ``override_role`` is called and", "usually do soft authorization: partial or empty response bodies are", "caught_exception def is_admin(): \"\"\"Verifies whether the current test role equals", "Allocate test-level resources here. with self.override_role(): # The role for", "= None if admin_resources is not None and not admin_resource_id:", "if not roles: roles.append(CONF.patrole.rbac_test_role) for role_name in roles: rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id", "or ``admin_resource_id`` should be used, not both. :param list admin_resources:", "be used for validating list API actions. :param test_obj: Instance", "\"member\": [\"reader\"], \"1\": [\"2\", \"3\"] # \"admin\": [\"member\", \"reader\"] }", "after test execution. :returns: None .. 
warning:: This function can", "and admin_resources is None: self._admin_resource_id = admin_resource_id self._validate_func = self._validate_resource", "cls._user_id)['roles'] all_role_ids = [role['id'] for role in roles] # NOTE(felipemonteiro):", "roles: cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id, cls._user_id, role['id']) return False @contextlib.contextmanager def override_role_and_validate_list(self,", "switching ``os_primary`` role. Should be used as a mixin class", "``_validate_resource`` function. :raises RbacValidateListException: if both ``admin_resources`` and ``admin_resource_id`` are", "avoid over-permission errors: if the current list of roles on", "the API call above threw an exception, any code below", "[\"member\", \"reader\"] [\"reader\"] >> [\"reader\"] [\"custom_role\"] >> [\"custom_role\"] :param roles:", "if CONF.identity_feature_enabled.api_v3: admin_roles_client = cls.os_admin.roles_v3_client else: raise lib_exc.InvalidConfiguration( \"Patrole role", "return caught_exception def is_admin(): \"\"\"Verifies whether the current test role", "_validate_resource(self): \"\"\"Validates that the admin resource is present in the", "def _create_user_role_on_project(cls, role_ids): for role_id in role_ids: cls.admin_roles_client.create_user_role_on_project( cls._project_id, cls._user_id,", "governing permissions and limitations # under the License. import contextlib", "[\"member\"] >> [\"member\", \"reader\"] [\"reader\"] >> [\"reader\"] [\"custom_role\"] >> [\"custom_role\"]", "Should be used as a mixin class alongside an instance", "decorator. self.foo_service.bar_api_call() # The role is switched back to admin", "current test role equals the admin role. :returns: True if", "by ``os_primary`` Tempest credentials. 
Temporarily change the role used by", "@classmethod def setup_clients(cls): if CONF.identity_feature_enabled.api_v3: admin_roles_client = cls.os_admin.roles_v3_client else: raise", "@classmethod def _prepare_role_inferences_mapping(cls): \"\"\"Preparing roles mapping to support role inferences", "``admin_resource_id`` are set or unset. \"\"\" self.resources = None if", "are required for \" \"RBAC testing.\") if not admin_role_id: missing_roles.append(CONF.identity.admin_role)", "# noqa: E501 def process_roles(role_id, data): roles = data.get(role_id, set())", "UUID admin_resource_id: An ID of a resource created before calling", "_admin_role_id = None _rbac_role_ids = None _project_id = None _user_id", "the ``override_role_and_validate_list`` function. :return: py:class:`_ValidateListContext` object. Example:: # the resource", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "underway to safely clean up after this function. Example:: @rbac_rule_validation.action(service='test',", "in cls._role_map] msg += \" Following roles were not found:", "admin_resource_id: An ID of a resource created before calling the", "admin_resource_id: self._admin_len = len(admin_resources) if not self._admin_len: raise rbac_exceptions.RbacValidateListException( reason=\"the", "proper validation function. :raises RbacValidateListException: if the ``ctx.resources`` variable is", "for role_id in data: res[role_id] = process_roles(role_id, data) return res", "msg += \" Following roles were not found: %s.\" %", "for overriding the value of the primary Tempest credential's role", "may redefine this method to include their own or third", "raised. If so, remember that # for future validation. exc", "def _validate_override_role_called(self): \"\"\"Idempotently validate that ``override_role`` is called and reset", "the ``override_role_and_validate_list`` function. To validate will be used the ``_validate_len``", "Example:: with self.override_role_and_validate_list(...) 
as ctx: ctx.resources = list_function() \"\"\" def", "admin resource is present in the resources. \"\"\" for resource", "def override_role(self): \"\"\"Override the role used by ``os_primary`` Tempest credentials.", "to `list-all-role-inference-rules`_ keystone API returns all inference rules, which makes", "of roles :return: extended list of roles \"\"\" res =", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "contextlib import sys import time from oslo_log import log as", "``override_role``, so that, by process of elimination, it can be", "needed roles: %s; Base roles: %s', res, roles) return list(res)", "= True def _validate_override_role_called(self): \"\"\"Idempotently validate that ``override_role`` is called", "\"\"\" ctx = _ValidateListContext(admin_resources, admin_resource_id) with self.override_role(): yield ctx ctx._validate()", "cls.os_primary.credentials.user_id cls._role_inferences_mapping = cls._prepare_role_inferences_mapping() cls._init_roles() # Store the user's original", "include their own or third party client auth_providers. \"\"\" return", "= data.get(role_id, set()) for rid in roles.copy(): roles.update(process_roles(rid, data)) return", "the decorator. self.foo_service.bar_api_call() # The role is switched back to", "cls.admin_roles_client = admin_roles_client cls._project_id = cls.os_primary.credentials.tenant_id cls._user_id = cls.os_primary.credentials.user_id cls._role_inferences_mapping", "credentials always cycle between roles defined by ``CONF.identity.admin_role`` and ``CONF.patrole.rbac_test_roles``.", "lib_exc from patrole_tempest_plugin import rbac_exceptions CONF = config.CONF LOG =", "as ctx: # the list of resources available for member", "the # test. 
Automatically switch back to the admin role", "subsecond aware so sleep to ensure we are # passing", "is set to ``[patrole] rbac_test_role`` * If False: role is", "convert_data(raw_data['role_inferences']) res = {} for role_id in data: res[role_id] =", ":return: py:class:`_ValidateListContext` object. Example:: # the resource created by admin", "of a list function must be assigned to the ``ctx.resources``", "roles = data.get(role_id, set()) for rid in roles.copy(): roles.update(process_roles(rid, data))", "roles) return list(res) @contextlib.contextmanager def override_role(self): \"\"\"Override the role used", "%s; Base roles: %s', res, roles) return list(res) @contextlib.contextmanager def", "return res def get_all_needed_roles(self, roles): \"\"\"Extending given roles with roles", "function. :return: py:class:`_ValidateListContext` object. Example:: # the resource created by", "= admin_role_id cls._rbac_role_ids = rbac_role_ids # Adding backward mapping cls._role_map.update({v:", "it to the mapping:: { \"2\": [\"3\"], # \"member\": [\"reader\"],", "`target_role` already exists. if not roles_already_present: cls._create_user_role_on_project(target_roles) except Exception as", "``override_role``. \"\"\" self.__override_role_caught_exc = True def _validate_override_role_called(self): \"\"\"Idempotently validate that", "caught_exception = self.__override_role_caught_exc self.__override_role_caught_exc = False return caught_exception def is_admin():", "TODO(felipemonteiro): Make this more robust via a context is admin", "= True def _set_override_role_caught_exc(self): \"\"\"Helper for tracking whether exception was", "policy specified by \"rule\" in the decorator. 
self.foo_service.bar_api_call() # The", "overriding the value of the primary Tempest credential's role (i.e.", "role used by ``os_primary`` credentials to: * ``[patrole] rbac_test_roles`` before", "\".join(cls._role_map) raise rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id = admin_role_id cls._rbac_role_ids = rbac_role_ids #", "invalid). \"\"\" caught_exception = self.__override_role_caught_exc self.__override_role_caught_exc = False return caught_exception", "roles for pre-provisioned credentials. Work is underway to safely clean", ">> [\"reader\"] [\"custom_role\"] >> [\"custom_role\"] :param roles: list of roles", "and primary credentials, needed to perform the API call which", "the specific language governing permissions and limitations # under the", "CONF.patrole.rbac_test_role: if not roles: roles.append(CONF.patrole.rbac_test_role) for role_name in roles: rbac_role_ids.append(cls._role_map.get(role_name))", "in roles: cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id, cls._user_id, role['id']) return False @contextlib.contextmanager def", "under the Apache License, Version 2.0 (the \"License\"); you may", "that enforces the # expected policy specified by \"rule\" in", "determined whether one was thrown outside (which is invalid). \"\"\"", "If so, remember that # for future validation. exc =", "= False try: target_roles = (cls._rbac_role_ids if toggle_rbac_role else [cls._admin_role_id])", "of which are required for \" \"RBAC testing.\") if not", "_rbac_role_ids = None _project_id = None _user_id = None _role_map", "not None: self._set_override_role_caught_exc() # This code block is always executed,", "``[identity] admin_role`` == ``[patrole] rbac_test_roles``. 
if not roles_already_present: time.sleep(1) for", "roles_already_present = cls._list_and_clear_user_roles_on_project( cls._orig_roles) if not roles_already_present: cls._create_user_role_on_project(cls._orig_roles) @classmethod def", "\"`[identity] admin_role`, both of which are required for \" \"RBAC", "caught inside ``override_role``, so that, by process of elimination, it", "reason=\"ctx.resources is not assigned\") self._validate_func() class RbacUtilsMixin(object): \"\"\"Utility mixin responsible", "**kwargs) # Shows if override_role was called. self.__override_role_called = False", "role_name in roles: rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id = cls._role_map.get(CONF.identity.admin_role) if not all([admin_role_id,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "cls._user_id)['roles'] cls._orig_roles = [role['id'] for role in roles] cls.addClassResourceCleanup(cls.restore_roles) #", "role is set to ``[patrole] rbac_test_role`` * If False: role", "in roles] # NOTE(felipemonteiro): We do not use ``role_id in", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "switch back to the admin role for test clean #", "from tempest.lib import exceptions as lib_exc from patrole_tempest_plugin import rbac_exceptions", "= None _user_id = None _role_map = None _role_inferences_mapping =", "[\"member\", \"reader\"] } .. _list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules \"\"\" # noqa: E501", "self.__override_role_called self.__override_role_called = False return was_called def _validate_override_role_caught_exc(self): \"\"\"Idempotently validate", "rbac_exceptions.RbacValidateListException( reason=\"ctx.resources is not assigned\") self._validate_func() class RbacUtilsMixin(object): \"\"\"Utility mixin", "# passing the second boundary before attempting to authenticate. 
#", "cls._init_roles() # Store the user's original roles and rollback after", "as a result of role # overriding. This will optimize", "required by applicable law or agreed to in writing, software", "If True: role is set to ``[patrole] rbac_test_role`` * If", "role is set to ``[identity] admin_role`` \"\"\" LOG.debug('Overriding role to:", "the # project includes \"admin\" and \"Member\", and we are", "resources. \"\"\" if not len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len >", "v3 identity API.\") cls.admin_roles_client = admin_roles_client cls._project_id = cls.os_primary.credentials.tenant_id cls._user_id", "be assigned to the ``ctx.resources`` variable. Example:: with self.override_role_and_validate_list(...) as", "None and not admin_resource_id: self._admin_len = len(admin_resources) if not self._admin_len:", "\"\"\" # noqa: E501 def process_roles(role_id, data): roles = data.get(role_id,", "in roles.copy(): roles.update(process_roles(rid, data)) return roles def convert_data(data): res =", "\" Available roles: %s.\" % \", \".join(cls._role_map) raise rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id", "list_function() \"\"\" def __init__(self, admin_resources=None, admin_resource_id=None): \"\"\"Constructor for ``ValidateListContext``. Either", ":param toggle_rbac_role: Boolean value that controls the role that overrides", "= self.__override_role_called self.__override_role_called = False return was_called def _validate_override_role_caught_exc(self): \"\"\"Idempotently", "agreed to in writing, software # distributed under the License", "rule['implies']} res[prior_role] = implies return res raw_data = cls.admin_roles_client.list_all_role_inference_rules() data", "admin role for test clean # up. self._override_role(False) @classmethod def", "block, call the API endpoint that enforces the # expected", "distributed under the License is distributed on an \"AS IS\"", "executed. 
\"\"\" self._set_override_role_called() self._override_role(True) try: # Execute the test. yield", "admin_resources=None, admin_resource_id=None): \"\"\"Constructor for ``ValidateListContext``. Either ``admin_resources`` or ``admin_resource_id`` should", "assigned. \"\"\" if self.resources is None: raise rbac_exceptions.RbacValidateListException( reason=\"ctx.resources is", "a subset of the available resources. Should only be used", "= process_roles(role_id, data) return res def get_all_needed_roles(self, roles): \"\"\"Extending given", "in ``override_role_and_validate_list`` function and the result of a list function", "k for k, v in cls._role_map.items()}) @classmethod def _create_user_role_on_project(cls, role_ids):", "CONDITIONS OF ANY KIND, either express or implied. See the", "test-level resources here. with self.override_role(): # The role for `os_primary`", "sys.exc_info()[0] if exc is not None: self._set_override_role_caught_exc() # This code", "whether the current test role equals the admin role. :returns:", "py:class:`_ValidateListContext` object. Example:: # the resource created by admin admin_resource_id", "CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role) roles = list(set(roles)) # TODO(felipemonteiro): Make this more", "res[prior_role] = implies return res raw_data = cls.admin_roles_client.list_all_role_inference_rules() data =", "{ \"implies\": [{\"id\": \"2\", \"name\": \"member\"}], \"prior_role\": {\"id\": \"1\", \"name\":", "was_called = self.__override_role_called self.__override_role_called = False return was_called def _validate_override_role_caught_exc(self):", "so, remember that # for future validation. exc = sys.exc_info()[0]", "* If True: role is set to ``[patrole] rbac_test_role`` *", "admin resources. 
\"\"\" if not len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len", "res = {} for role_id in data: res[role_id] = process_roles(role_id,", "\" \"exclusive\") def _validate_len(self): \"\"\"Validates that the number of resources", "The role is switched back to admin automatically. Note that", "res.update(role_names) LOG.debug('All needed roles: %s; Base roles: %s', res, roles)", "roles_already_present: time.sleep(1) for provider in auth_providers: provider.set_auth() @classmethod def _init_roles(cls):", "role for `os_primary` has now been overridden. Within # this", "before calling the ``override_role_and_validate_list`` function. :return: py:class:`_ValidateListContext` object. Example:: #", "is set to ``[identity] admin_role`` \"\"\" LOG.debug('Overriding role to: %s.',", "will be used the ``_validate_len`` function. :param UUID admin_resource_id: An", "resource in self.resources: if resource['id'] == self._admin_resource_id: return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources)", "Work is underway to safely clean up after this function.", "function. Example:: @rbac_rule_validation.action(service='test', rules=['a:test:rule']) def test_foo(self): # Allocate test-level resources", "must delete the \"admin\" role. Thus, we only # return", "\"\"\" def __init__(self, admin_resources=None, admin_resource_id=None): \"\"\"Constructor for ``ValidateListContext``. Either ``admin_resources``", "yield ctx ctx._validate() @classmethod def get_auth_providers(cls): \"\"\"Returns list of auth_providers", "backward mapping cls._role_map.update({v: k for k, v in cls._role_map.items()}) @classmethod", "admin_resource_id are mutually \" \"exclusive\") def _validate_len(self): \"\"\"Validates that the", ">> [\"member\", \"reader\"] [\"reader\"] >> [\"reader\"] [\"custom_role\"] >> [\"custom_role\"] :param", "resource is present in the resources. 
\"\"\" for resource in", "self._validate_func() class RbacUtilsMixin(object): \"\"\"Utility mixin responsible for switching ``os_primary`` role.", "role['id']) return False @contextlib.contextmanager def override_role_and_validate_list(self, admin_resources=None, admin_resource_id=None): \"\"\"Call ``override_role``", "2017 AT&T Corporation. # All Rights Reserved. # # Licensed", "LOG.debug('Available roles: %s', cls._role_map.keys()) rbac_role_ids = [] roles = CONF.patrole.rbac_test_roles", "then we must delete the \"admin\" role. Thus, we only", "called. self.__override_role_called = False # Shows if exception raised during", "= False # Shows if exception raised during override_role. self.__override_role_caught_exc", "``os_primary`` credentials. * If True: role is set to ``[patrole]", "ctx.resources = self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)[\"dscp_marking_rules\"] \"\"\" ctx = _ValidateListContext(admin_resources, admin_resource_id) with", "credentials, needed to perform the API call which does policy", "used the ``_validate_resource`` function. :raises RbacValidateListException: if both ``admin_resources`` and", "which does policy enforcement. The primary credentials always cycle between", "\"reader\"] [\"member\"] >> [\"member\", \"reader\"] [\"reader\"] >> [\"reader\"] [\"custom_role\"] >>", "result of a list function must be assigned to the", "= len(admin_resources) if not self._admin_len: raise rbac_exceptions.RbacValidateListException( reason=\"the list of", "Base roles: %s', res, roles) return list(res) @contextlib.contextmanager def override_role(self):", "[\"dscp_marking_rule\"][\"id']) with self.override_role_and_validate_list( admin_resource_id=admin_resource_id) as ctx: # the list of", "user roles for pre-provisioned credentials. 
Work is underway to safely", "= None @classmethod def restore_roles(cls): if cls._orig_roles: LOG.info(\"Restoring original roles", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "function. :raises RbacValidateListException: if both ``admin_resources`` and ``admin_resource_id`` are set", "not all([admin_role_id, all(rbac_role_ids)]): missing_roles = [] msg = (\"Could not", "of roles \"\"\" res = set(r for r in roles)", "use this mixin. Example:: class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest): @classmethod def setup_clients(cls):", "if not len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len > len(self.resources): raise", "[] admin_roles_client = None @classmethod def restore_roles(cls): if cls._orig_roles: LOG.info(\"Restoring", "both. :param list admin_resources: The list of resources received before", "should be used, not both. :param list admin_resources: The list", "roles with roles from mapping Examples:: [\"admin\"] >> [\"admin\", \"member\",", "See the # License for the specific language governing permissions", "credentials. Work is underway to safely clean up after this", "{} for rule in data: prior_role = rule['prior_role']['id'] implies =", "``admin_resources`` and ``admin_resource_id`` are set or unset. \"\"\" self.resources =", "resources is less than admin resources. \"\"\" if not len(self.resources):", "None _role_inferences_mapping = None _orig_roles = [] admin_roles_client = None", "admin_resource_id) with self.override_role(): yield ctx ctx._validate() @classmethod def get_auth_providers(cls): \"\"\"Returns", "law or agreed to in writing, software # distributed under", "admin_role`` \"\"\" LOG.debug('Overriding role to: %s.', toggle_rbac_role) roles_already_present = False", "that overrides default role of ``os_primary`` credentials. * If True:", "which makes it possible to prepare roles mapping. 
It walks", ":param list admin_resources: The list of resources received before calling", "or unset. \"\"\" self.resources = None if admin_resources is not", "@classmethod def restore_roles(cls): if cls._orig_roles: LOG.info(\"Restoring original roles %s\", cls._orig_roles)", "def _set_override_role_called(self): \"\"\"Helper for tracking whether ``override_role`` was called.\"\"\" self.__override_role_called", "res raw_data = cls.admin_roles_client.list_all_role_inference_rules() data = convert_data(raw_data['role_inferences']) res = {}", "super(BaseRbacTest, cls).setup_clients() cls.hosts_client = cls.os_primary.hosts_client ... This class is responsible", "available resources. Should only be used for validating list API", "validation of the list functions. This class is used in", "calling the ``override_role_and_validate_list`` function. To validate will be used the", "unset. \"\"\" self.resources = None if admin_resources is not None", "@contextlib.contextmanager def override_role_and_validate_list(self, admin_resources=None, admin_resource_id=None): \"\"\"Call ``override_role`` and validate RBAC", "return list(res) @contextlib.contextmanager def override_role(self): \"\"\"Override the role used by", "to the ``ctx.resources`` variable. Example:: with self.override_role_and_validate_list(...) as ctx: ctx.resources", "**kwargs): super(RbacUtilsMixin, self).__init__(*args, **kwargs) # Shows if override_role was called.", "with self.override_role(): # The role for `os_primary` has now been", "the user's original roles and rollback after testing. roles =", "test execution * ``[identity] admin_role`` after test execution Automatically switches", "yield finally: # Check whether an exception was raised. 
If", "None: raise rbac_exceptions.RbacValidateListException( reason=\"ctx.resources is not assigned\") self._validate_func() class RbacUtilsMixin(object):", "the user's roles on the project are an exact match.", "= None _rbac_role_ids = None _project_id = None _user_id =", "clean # up. self._override_role(False) @classmethod def _override_role(cls, toggle_rbac_role=False): \"\"\"Private helper", "raise rbac_exceptions.RbacValidateListException( reason=\"ctx.resources is not assigned\") self._validate_func() class RbacUtilsMixin(object): \"\"\"Utility", "by ``CONF.identity.admin_role`` and ``CONF.patrole.rbac_test_roles``. \"\"\" credentials = ['primary', 'admin'] def", "list of roles :return: extended list of roles \"\"\" res", "Execute the test. yield finally: # Check whether an exception", "the list functions. This class is used in ``override_role_and_validate_list`` function", "if override_role was called. self.__override_role_called = False # Shows if", "in the decorator. self.foo_service.bar_api_call() # The role is switched back", "one was thrown outside (which is invalid). \"\"\" caught_exception =", "for sequential tests. \"\"\" was_called = self.__override_role_called self.__override_role_called = False", "\"\"\" for resource in self.resources: if resource['id'] == self._admin_resource_id: return", "back to the admin role for test clean # up.", "are switching to the # \"Member\" role, then we must", "not override roles if `target_role` already exists. 
if not roles_already_present:", "import excutils from tempest import config from tempest.lib import exceptions", "if not roles_already_present: cls._create_user_role_on_project(cls._orig_roles) @classmethod def setup_clients(cls): if CONF.identity_feature_enabled.api_v3: admin_roles_client", "admin_roles_client cls._project_id = cls.os_primary.credentials.tenant_id cls._user_id = cls.os_primary.credentials.user_id cls._role_inferences_mapping = cls._prepare_role_inferences_mapping()", "\"admin\"} } ] } and converts it to the mapping::", "project includes \"admin\" and \"Member\", and we are switching to", "for role in res.copy(): role_id = self.__class__._role_map.get(role) implied_roles = self.__class__._role_inferences_mapping.get(", "if the user's roles on the project are an exact", "testing. roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] cls._orig_roles = [role['id'] for", "# # Licensed under the Apache License, Version 2.0 (the", "roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] all_role_ids = [role['id'] for role", "rid in implied_roles} res.update(role_names) LOG.debug('All needed roles: %s; Base roles:", ">> [\"admin\", \"member\", \"reader\"] [\"member\"] >> [\"member\", \"reader\"] [\"reader\"] >>", "during override_role. self.__override_role_caught_exc = False _admin_role_id = None _rbac_role_ids =", "test. yield finally: # Check whether an exception was raised.", "self.resources is None: raise rbac_exceptions.RbacValidateListException( reason=\"ctx.resources is not assigned\") self._validate_func()", "errors: if the current list of roles on the #", "subset of the available resources. 
Should only be used for", "\"Patrole role overriding only supports v3 identity API.\") cls.admin_roles_client =", ":py:class:`tempest.test.BaseTestCase` to perform Patrole class setup for a base RBAC", "set to ``[patrole] rbac_test_role`` * If False: role is set", "current list of roles on the # project includes \"admin\"", "API call above threw an exception, any code below this", ">> [\"custom_role\"] :param roles: list of roles :return: extended list", "override_role_and_validate_list(self, admin_resources=None, admin_resource_id=None): \"\"\"Call ``override_role`` and validate RBAC for a", "its value to False for sequential tests. \"\"\" was_called =", "roles: %s.\" % \", \".join(cls._role_map) raise rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id = admin_role_id", "None: self._set_override_role_caught_exc() # This code block is always executed, no", "[\"3\"], # \"member\": [\"reader\"], \"1\": [\"2\", \"3\"] # \"admin\": [\"member\",", "``os_primary`` role). By doing so, it is possible to seamlessly", ":raises RbacValidateListException: if the ``ctx.resources`` variable is not assigned. \"\"\"", "tracking whether ``override_role`` was called.\"\"\" self.__override_role_called = True def _set_override_role_caught_exc(self):", "rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len > len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self): \"\"\"Validates", "``override_role_and_validate_list`` function and the result of a list function must", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "implies return res raw_data = cls.admin_roles_client.list_all_role_inference_rules() data = convert_data(raw_data['role_inferences']) res", "will be used the ``_validate_resource`` function. :raises RbacValidateListException: if both", "was thrown inside ``override_role``. 
\"\"\" self.__override_role_caught_exc = True def _validate_override_role_called(self):", "validate will be used the ``_validate_resource`` function. :raises RbacValidateListException: if", "NOTE(felipemonteiro): We do not use ``role_id in all_role_ids`` here #", "\"\"\"Utility mixin responsible for switching ``os_primary`` role. Should be used", "to admin automatically. Note that # if the API call", "of role # overriding. This will optimize test runtime in", "admin_resource_id = ( self.ntp_client.create_dscp_marking_rule() [\"dscp_marking_rule\"][\"id']) with self.override_role_and_validate_list( admin_resource_id=admin_resource_id) as ctx:", "raised during override_role. self.__override_role_caught_exc = False _admin_role_id = None _rbac_role_ids", "responsible for overriding the value of the primary Tempest credential's", "as ctx: ctx.resources = list_function() \"\"\" def __init__(self, admin_resources=None, admin_resource_id=None):", "r in rule['implies']} res[prior_role] = implies return res raw_data =", "in the resources. \"\"\" for resource in self.resources: if resource['id']", "= None _role_map = None _role_inferences_mapping = None _orig_roles =", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "a mixin class alongside an instance of :py:class:`tempest.test.BaseTestCase` to perform", "given roles with roles from mapping Examples:: [\"admin\"] >> [\"admin\",", "a result of role # overriding. This will optimize test", "rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self): \"\"\"Validates that the admin resource is present", "\"\"\" LOG.debug('Overriding role to: %s.', toggle_rbac_role) roles_already_present = False try:", ":param roles: list of roles :return: extended list of roles", "# point in the test is not executed. \"\"\" self._set_override_role_called()", "def setup_clients(cls): super(BaseRbacTest, cls).setup_clients() cls.hosts_client = cls.os_primary.hosts_client ... 
This class", "``CONF.identity.admin_role`` and ``CONF.patrole.rbac_test_roles``. \"\"\" credentials = ['primary', 'admin'] def __init__(self,", "roles were not found: %s.\" % ( \", \".join(missing_roles)) msg", "value of the primary Tempest credential's role (i.e. ``os_primary`` role).", "does policy enforcement. The primary credentials always cycle between roles", "``os_primary`` Tempest credentials. :param toggle_rbac_role: Boolean value that controls the", "= rbac_role_ids # Adding backward mapping cls._role_map.update({v: k for k,", "all([admin_role_id, all(rbac_role_ids)]): missing_roles = [] msg = (\"Could not find", "API call which does policy enforcement. The primary credentials always", "is switched back to admin automatically. Note that # if", "data: prior_role = rule['prior_role']['id'] implies = {r['id'] for r in", "exc = sys.exc_info()[0] if exc is not None: self._set_override_role_caught_exc() #", "{r['name']: r['id'] for r in available_roles} LOG.debug('Available roles: %s', cls._role_map.keys())", "and validate RBAC for a list API action. List actions", "in auth_providers: provider.clear_auth() # Fernet tokens are not subsecond aware", "role_id in role_ids: cls.admin_roles_client.create_user_role_on_project( cls._project_id, cls._user_id, role_id) @classmethod def _list_and_clear_user_roles_on_project(cls,", "_set_override_role_called(self): \"\"\"Helper for tracking whether ``override_role`` was called.\"\"\" self.__override_role_called =", "match. if set(role_ids) == set(all_role_ids): return True for role in", "is None: self._admin_resource_id = admin_resource_id self._validate_func = self._validate_resource else: raise", "method to include their own or third party client auth_providers.", "not roles_already_present: time.sleep(1) for provider in auth_providers: provider.set_auth() @classmethod def", "( \", \".join(missing_roles)) msg += \" Available roles: %s.\" %", "... 
This class is responsible for overriding the value of", "roles.append(CONF.patrole.rbac_test_role) for role_name in roles: rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id = cls._role_map.get(CONF.identity.admin_role) if", "endpoint that enforces the # expected policy specified by \"rule\"", "project are an exact match. if set(role_ids) == set(all_role_ids): return", "False for sequential tests. \"\"\" was_called = self.__override_role_called self.__override_role_called =", "API actions. :param test_obj: Instance of ``tempest.test.BaseTestCase``. :param list admin_resources:", "a context is admin # lookup. return CONF.identity.admin_role in roles", "is always executed, no matter the result of the #", "\"2\", \"name\": \"member\"}], \"prior_role\": {\"id\": \"1\", \"name\": \"admin\"} } ]", "return a subset of the available resources. Should only be", "This will optimize test runtime in the case where #", "\"name\": \"admin\"} } ] } and converts it to the", "rbac_exceptions.RbacValidateListException( reason=\"the list of admin resources cannot be empty\") self._validate_func", "are returned instead of exceptions. This helper validates that unauthorized", "the # expected policy specified by \"rule\" in the decorator.", "their own or third party client auth_providers. \"\"\" return [cls.os_primary.auth_provider]", "present in the resources. \"\"\" for resource in self.resources: if", "for pre-provisioned credentials. Work is underway to safely clean up", "the admin role for test clean # up. self._override_role(False) @classmethod", "is present in the resources. 
\"\"\" for resource in self.resources:", "for tracking whether ``override_role`` was called.\"\"\" self.__override_role_called = True def", "= cls.admin_roles_client.list_all_role_inference_rules() data = convert_data(raw_data['role_inferences']) res = {} for role_id", "return was_called def _validate_override_role_caught_exc(self): \"\"\"Idempotently validate that exception was caught", "# This code block is always executed, no matter the", "used for validating list API actions. :param test_obj: Instance of", "and we are switching to the # \"Member\" role, then", "\"reader\"] [\"reader\"] >> [\"reader\"] [\"custom_role\"] >> [\"custom_role\"] :param roles: list", "data: res[role_id] = process_roles(role_id, data) return res def get_all_needed_roles(self, roles):", "for tracking whether exception was thrown inside ``override_role``. \"\"\" self.__override_role_caught_exc", "set(r for r in roles) for role in res.copy(): role_id", "from mapping Examples:: [\"admin\"] >> [\"admin\", \"member\", \"reader\"] [\"member\"] >>", "executed, no matter the result of the # test. Automatically", "role_names = {self.__class__._role_map[rid] for rid in implied_roles} res.update(role_names) LOG.debug('All needed", "Shows if override_role was called. self.__override_role_called = False # Shows", "OF ANY KIND, either express or implied. See the #", "result of the # test. Automatically switch back to the", "implied_roles} res.update(role_names) LOG.debug('All needed roles: %s; Base roles: %s', res,", "contain the admin role. \"\"\" roles = CONF.patrole.rbac_test_roles # TODO(vegasq)", "pre-provisioned credentials. 
Work is underway to safely clean up after", "in writing, software # distributed under the License is distributed", "is_admin(): \"\"\"Verifies whether the current test role equals the admin", "# Only sleep if a token revocation occurred as a", "= admin_roles_client cls._project_id = cls.os_primary.credentials.tenant_id cls._user_id = cls.os_primary.credentials.user_id cls._role_inferences_mapping =", "\"\"\"Private helper for overriding ``os_primary`` Tempest credentials. :param toggle_rbac_role: Boolean", "the API endpoint that enforces the # expected policy specified", "limitations # under the License. import contextlib import sys import", "%s', res, roles) return list(res) @contextlib.contextmanager def override_role(self): \"\"\"Override the", "query to `list-all-role-inference-rules`_ keystone API returns all inference rules, which", "Store the user's original roles and rollback after testing. roles", "was caught inside ``override_role``, so that, by process of elimination,", "https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules \"\"\" # noqa: E501 def process_roles(role_id, data): roles =", "if a token revocation occurred as a result of role", "config from tempest.lib import exceptions as lib_exc from patrole_tempest_plugin import", "mapping to support role inferences Making query to `list-all-role-inference-rules`_ keystone", "test clean # up. self._override_role(False) @classmethod def _override_role(cls, toggle_rbac_role=False): \"\"\"Private", "of a resource created before calling the ``override_role_and_validate_list`` function. :return:", "Tempest credentials. Temporarily change the role used by ``os_primary`` credentials", "in roles) for role in res.copy(): role_id = self.__class__._role_map.get(role) implied_roles", "warning:: This function can alter user roles for pre-provisioned credentials.", "with the License. 
You may obtain # a copy of", "The primary credentials always cycle between roles defined by ``CONF.identity.admin_role``", "cls._user_id = cls.os_primary.credentials.user_id cls._role_inferences_mapping = cls._prepare_role_inferences_mapping() cls._init_roles() # Store the", "used in ``override_role_and_validate_list`` function and the result of a list", "a token revocation occurred as a result of role #", "switching to the # \"Member\" role, then we must delete", "responsible for validation of the list functions. This class is", "role. :returns: True if ``rbac_test_roles`` contain the admin role. \"\"\"", "``override_role_and_validate_list`` function. To validate will be used the ``_validate_resource`` function.", "False @contextlib.contextmanager def override_role_and_validate_list(self, admin_resources=None, admin_resource_id=None): \"\"\"Call ``override_role`` and validate", "\"\"\"Call ``override_role`` and validate RBAC for a list API action.", "rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self): \"\"\"Calls the proper validation function. :raises RbacValidateListException:", "overriding. This will optimize test runtime in the case where", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "def restore_roles(cls): if cls._orig_roles: LOG.info(\"Restoring original roles %s\", cls._orig_roles) roles_already_present", "was called. self.__override_role_called = False # Shows if exception raised", "the resources. \"\"\" for resource in self.resources: if resource['id'] ==", "and converts it to the mapping:: { \"2\": [\"3\"], #", "overridden. 
Within # this block, call the API endpoint that", "self.resources: if resource['id'] == self._admin_resource_id: return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self):", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "res = {} for rule in data: prior_role = rule['prior_role']['id']", "``ctx.resources`` variable is not assigned. \"\"\" if self.resources is None:", "_validate(self): \"\"\"Calls the proper validation function. :raises RbacValidateListException: if the", "calling the ``override_role_and_validate_list`` function. :return: py:class:`_ValidateListContext` object. Example:: # the", "oslo_log import log as logging from oslo_utils import excutils from", "reset its value to False for sequential tests. \"\"\" was_called", "= cls.os_admin.roles_v3_client else: raise lib_exc.InvalidConfiguration( \"Patrole role overriding only supports", "list(set(roles)) # TODO(felipemonteiro): Make this more robust via a context", "empty response bodies are returned instead of exceptions. This helper", "we only # return early if the user's roles on", "except in compliance with the License. You may obtain #", "Adding backward mapping cls._role_map.update({v: k for k, v in cls._role_map.items()})", "ctx.resources = list_function() \"\"\" def __init__(self, admin_resources=None, admin_resource_id=None): \"\"\"Constructor for", "%s.', toggle_rbac_role) roles_already_present = False try: target_roles = (cls._rbac_role_ids if", "the admin role. \"\"\" roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop", "rid in roles.copy(): roles.update(process_roles(rid, data)) return roles def convert_data(data): res", "an exception was raised. If so, remember that # for", "in auth_providers: provider.set_auth() @classmethod def _init_roles(cls): available_roles = cls.admin_roles_client.list_roles()['roles'] cls._role_map", "is not executed. 
\"\"\" self._set_override_role_called() self._override_role(True) try: # Execute the", "RBAC for a list API action. List actions usually do", "setup_clients(cls): super(BaseRbacTest, cls).setup_clients() cls.hosts_client = cls.os_primary.hosts_client ... This class is", "matter the result of the # test. Automatically switch back", "{} for role_id in data: res[role_id] = process_roles(role_id, data) return", "noqa: E501 def process_roles(role_id, data): roles = data.get(role_id, set()) for", "# distributed under the License is distributed on an \"AS", "rule['prior_role']['id'] implies = {r['id'] for r in rule['implies']} res[prior_role] =", "credentials to: * ``[patrole] rbac_test_roles`` before test execution * ``[identity]", "# Unless required by applicable law or agreed to in", "def is_admin(): \"\"\"Verifies whether the current test role equals the", "swap between admin credentials, needed for setup and clean up,", "\" \"`[identity] admin_role`, both of which are required for \"", "to avoid over-permission errors: if the current list of roles", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "for r in roles) for role in res.copy(): role_id =", "removed if CONF.patrole.rbac_test_role: if not roles: roles.append(CONF.patrole.rbac_test_role) for role_name in", "that exception was caught inside ``override_role``, so that, by process", "roles \"\"\" res = set(r for r in roles) for", "the role used by ``os_primary`` credentials to: * ``[patrole] rbac_test_roles``", "admin_role`, both of which are required for \" \"RBAC testing.\")", "RbacValidateListException: if both ``admin_resources`` and ``admin_resource_id`` are set or unset.", "the ``ctx.resources`` variable is not assigned. 
\"\"\" if self.resources is", "self._override_role(False) @classmethod def _override_role(cls, toggle_rbac_role=False): \"\"\"Private helper for overriding ``os_primary``", "if not admin_role_id: missing_roles.append(CONF.identity.admin_role) if not all(rbac_role_ids): missing_roles += [role_name", "seamlessly swap between admin credentials, needed for setup and clean", "= ( self.ntp_client.create_dscp_marking_rule() [\"dscp_marking_rule\"][\"id']) with self.override_role_and_validate_list( admin_resource_id=admin_resource_id) as ctx: #", "\"\"\" res = set(r for r in roles) for role", "list of auth_providers used within test. Tests may redefine this", "LOG = logging.getLogger(__name__) class _ValidateListContext(object): \"\"\"Context class responsible for validation", "for role in roles: cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id, cls._user_id, role['id']) return False", "= cls._prepare_role_inferences_mapping() cls._init_roles() # Store the user's original roles and", "for a list API action. List actions usually do soft", "cls.get_auth_providers() for provider in auth_providers: provider.clear_auth() # Fernet tokens are", "Tempest credential's role (i.e. ``os_primary`` role). By doing so, it", "len(admin_resources) if not self._admin_len: raise rbac_exceptions.RbacValidateListException( reason=\"the list of admin", "\"\"\" if not len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len > len(self.resources):", "# The role for `os_primary` has now been overridden. Within", "None _rbac_role_ids = None _project_id = None _user_id = None", ":returns: True if ``rbac_test_roles`` contain the admin role. \"\"\" roles", "switches to admin role after test execution. 
:returns: None ..", "the current list of roles on the # project includes", "# TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role)", "specific language governing permissions and limitations # under the License.", "= self.__class__._role_inferences_mapping.get( role_id, set()) role_names = {self.__class__._role_map[rid] for rid in", "sleep if a token revocation occurred as a result of", "# not use this file except in compliance with the", "Within # this block, call the API endpoint that enforces", "\"prior_role\": {\"id\": \"1\", \"name\": \"admin\"} } ] } and converts", "of resources received before calling the ``override_role_and_validate_list`` function. :param UUID", "= (cls._rbac_role_ids if toggle_rbac_role else [cls._admin_role_id]) roles_already_present = cls._list_and_clear_user_roles_on_project( target_roles)", "safely clean up after this function. Example:: @rbac_rule_validation.action(service='test', rules=['a:test:rule']) def", "``[identity] admin_role`` \"\"\" LOG.debug('Overriding role to: %s.', toggle_rbac_role) roles_already_present =", "missing_roles.append(CONF.identity.admin_role) if not all(rbac_role_ids): missing_roles += [role_name for role_name in", "to admin role after test execution. :returns: None .. warning::", "exception was raised. If so, remember that # for future", "[\"custom_role\"] >> [\"custom_role\"] :param roles: list of roles :return: extended", "from patrole_tempest_plugin import rbac_exceptions CONF = config.CONF LOG = logging.getLogger(__name__)", "# overriding. This will optimize test runtime in the case", "it possible to prepare roles mapping. 
It walks recursively through", "roles if role_name not in cls._role_map] msg += \" Following", "self._admin_resource_id: return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self): \"\"\"Calls the proper validation", "helper for overriding ``os_primary`` Tempest credentials. :param toggle_rbac_role: Boolean value", "the ``_validate_len`` function. :param UUID admin_resource_id: An ID of a", "is responsible for overriding the value of the primary Tempest", "{self.__class__._role_map[rid] for rid in implied_roles} res.update(role_names) LOG.debug('All needed roles: %s;", "be determined whether one was thrown outside (which is invalid).", "process_roles(role_id, data) return res def get_all_needed_roles(self, roles): \"\"\"Extending given roles", "under the License is distributed on an \"AS IS\" BASIS,", ":return: extended list of roles \"\"\" res = set(r for", "Shows if exception raised during override_role. self.__override_role_caught_exc = False _admin_role_id", "or empty response bodies are returned instead of exceptions. This", "import config from tempest.lib import exceptions as lib_exc from patrole_tempest_plugin", "\"name\": \"member\"}], \"prior_role\": {\"id\": \"1\", \"name\": \"admin\"} } ] }", "= admin_resource_id self._validate_func = self._validate_resource else: raise rbac_exceptions.RbacValidateListException( reason=\"admin_resources and", "controls the role that overrides default role of ``os_primary`` credentials.", "self.override_role(): yield ctx ctx._validate() @classmethod def get_auth_providers(cls): \"\"\"Returns list of", "= CONF.patrole.rbac_test_roles # TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed if", ":param test_obj: Instance of ``tempest.test.BaseTestCase``. 
:param list admin_resources: The list", "roles :return: extended list of roles \"\"\" res = set(r", "for rid in roles.copy(): roles.update(process_roles(rid, data)) return roles def convert_data(data):", "this file except in compliance with the License. You may", "exceptions. This helper validates that unauthorized roles only return a", "if toggle_rbac_role else [cls._admin_role_id]) roles_already_present = cls._list_and_clear_user_roles_on_project( target_roles) # Do", "role_ids): roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] all_role_ids = [role['id'] for", "role (i.e. ``os_primary`` role). By doing so, it is possible", "for role_id in role_ids: cls.admin_roles_client.create_user_role_on_project( cls._project_id, cls._user_id, role_id) @classmethod def", "used, not both. :param list admin_resources: The list of resources", "rules=['a:test:rule']) def test_foo(self): # Allocate test-level resources here. with self.override_role():", "[role_name for role_name in roles if role_name not in cls._role_map]", "original roles %s\", cls._orig_roles) roles_already_present = cls._list_and_clear_user_roles_on_project( cls._orig_roles) if not", "import log as logging from oslo_utils import excutils from tempest", "def __init__(self, *args, **kwargs): super(RbacUtilsMixin, self).__init__(*args, **kwargs) # Shows if", "_orig_roles = [] admin_roles_client = None @classmethod def restore_roles(cls): if", "list function must be assigned to the ``ctx.resources`` variable. Example::", "To validate will be used the ``_validate_len`` function. :param UUID", "if the current list of roles on the # project", "function. :param UUID admin_resource_id: An ID of a resource created", "\"member\"} }, { \"implies\": [{\"id\": \"2\", \"name\": \"member\"}], \"prior_role\": {\"id\":", "list of roles \"\"\" res = set(r for r in", "of the primary Tempest credential's role (i.e. ``os_primary`` role). 
By", "is None: raise rbac_exceptions.RbacValidateListException( reason=\"ctx.resources is not assigned\") self._validate_func() class", "['primary', 'admin'] def __init__(self, *args, **kwargs): super(RbacUtilsMixin, self).__init__(*args, **kwargs) #", "call above threw an exception, any code below this #", "where # ``[identity] admin_role`` == ``[patrole] rbac_test_roles``. if not roles_already_present:", "# Change default role to admin cls._override_role(False) super(RbacUtilsMixin, cls).setup_clients() @classmethod", "[\"reader\"] >> [\"reader\"] [\"custom_role\"] >> [\"custom_role\"] :param roles: list of", "\"\"\"Helper for tracking whether ``override_role`` was called.\"\"\" self.__override_role_called = True", "over-permission errors: if the current list of roles on the", "validate will be used the ``_validate_len`` function. :param UUID admin_resource_id:", "file except in compliance with the License. You may obtain", "resources. \"\"\" for resource in self.resources: if resource['id'] == self._admin_resource_id:", "_create_user_role_on_project(cls, role_ids): for role_id in role_ids: cls.admin_roles_client.create_user_role_on_project( cls._project_id, cls._user_id, role_id)", "`[patrole] rbac_test_roles` or \" \"`[identity] admin_role`, both of which are", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "find `[patrole] rbac_test_roles` or \" \"`[identity] admin_role`, both of which", "> len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self): \"\"\"Validates that the admin", "between admin credentials, needed for setup and clean up, and", "the result of a list function must be assigned to", "for r in available_roles} LOG.debug('Available roles: %s', cls._role_map.keys()) rbac_role_ids =", "roles_already_present: cls._create_user_role_on_project(cls._orig_roles) @classmethod def setup_clients(cls): if CONF.identity_feature_enabled.api_v3: admin_roles_client = cls.os_admin.roles_v3_client", "``override_role_and_validate_list`` function. :param UUID admin_resource_id: An ID of a resource", "== self._admin_resource_id: return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self): \"\"\"Calls the proper", "default role to admin cls._override_role(False) super(RbacUtilsMixin, cls).setup_clients() @classmethod def _prepare_role_inferences_mapping(cls):", "test role equals the admin role. :returns: True if ``rbac_test_roles``", "= None _role_inferences_mapping = None _orig_roles = [] admin_roles_client =", "{r['id'] for r in rule['implies']} res[prior_role] = implies return res", "provider.clear_auth() # Fernet tokens are not subsecond aware so sleep", "rollback after testing. roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] cls._orig_roles =", "roles_already_present = cls._list_and_clear_user_roles_on_project( target_roles) # Do not override roles if", "= [] roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop once CONF.patrole.rbac_test_role", "elif self._admin_len > len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self): \"\"\"Validates that", "API action. 
List actions usually do soft authorization: partial or", "roles: %s', cls._role_map.keys()) rbac_role_ids = [] roles = CONF.patrole.rbac_test_roles #", "# Execute the test. yield finally: # Check whether an", "RbacUtilsMixin(object): \"\"\"Utility mixin responsible for switching ``os_primary`` role. Should be", "# Check whether an exception was raised. If so, remember", "Either ``admin_resources`` or ``admin_resource_id`` should be used, not both. :param", "test is not executed. \"\"\" self._set_override_role_called() self._override_role(True) try: # Execute", "data) return res def get_all_needed_roles(self, roles): \"\"\"Extending given roles with", "_override_role(cls, toggle_rbac_role=False): \"\"\"Private helper for overriding ``os_primary`` Tempest credentials. :param", "not both. :param list admin_resources: The list of resources received", "lib_exc.InvalidConfiguration( \"Patrole role overriding only supports v3 identity API.\") cls.admin_roles_client", "role to: %s.', toggle_rbac_role) roles_already_present = False try: target_roles =", "role in roles] cls.addClassResourceCleanup(cls.restore_roles) # Change default role to admin", "Automatically switch back to the admin role for test clean", "below this # point in the test is not executed.", "class alongside an instance of :py:class:`tempest.test.BaseTestCase` to perform Patrole class", "None _role_map = None _role_inferences_mapping = None _orig_roles = []", "} .. _list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules \"\"\" # noqa: E501 def process_roles(role_id,", "override_role(self): \"\"\"Override the role used by ``os_primary`` Tempest credentials. Temporarily", "for `os_primary` has now been overridden. Within # this block,", "# for future validation. exc = sys.exc_info()[0] if exc is", "we must delete the \"admin\" role. 
Thus, we only #", "[] msg = (\"Could not find `[patrole] rbac_test_roles` or \"", "for provider in auth_providers: provider.clear_auth() # Fernet tokens are not", "cls._orig_roles = [role['id'] for role in roles] cls.addClassResourceCleanup(cls.restore_roles) # Change", "function. To validate will be used the ``_validate_len`` function. :param", "\"admin\": [\"member\", \"reader\"] } .. _list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules \"\"\" # noqa:", "exceptions as lib_exc from patrole_tempest_plugin import rbac_exceptions CONF = config.CONF", "switched back to admin automatically. Note that # if the", "exception raised during override_role. self.__override_role_caught_exc = False _admin_role_id = None", "writing, software # distributed under the License is distributed on", "True for role in roles: cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id, cls._user_id, role['id']) return", "the License. You may obtain # a copy of the", "roles from mapping Examples:: [\"admin\"] >> [\"admin\", \"member\", \"reader\"] [\"member\"]", "roles) for role in res.copy(): role_id = self.__class__._role_map.get(role) implied_roles =", "resources here. with self.override_role(): # The role for `os_primary` has", "use this file except in compliance with the License. You", "admin_role`` == ``[patrole] rbac_test_roles``. 
if not roles_already_present: time.sleep(1) for provider", "primary credentials, needed to perform the API call which does", "assigned\") self._validate_func() class RbacUtilsMixin(object): \"\"\"Utility mixin responsible for switching ``os_primary``", "provider.set_auth() @classmethod def _init_roles(cls): available_roles = cls.admin_roles_client.list_roles()['roles'] cls._role_map = {r['name']:", "Only sleep if a token revocation occurred as a result", "rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id = cls._role_map.get(CONF.identity.admin_role) if not all([admin_role_id, all(rbac_role_ids)]): missing_roles =", "\", \".join(missing_roles)) msg += \" Available roles: %s.\" % \",", "for rule in data: prior_role = rule['prior_role']['id'] implies = {r['id']", "whether an exception was raised. If so, remember that #", "def _init_roles(cls): available_roles = cls.admin_roles_client.list_roles()['roles'] cls._role_map = {r['name']: r['id'] for", "BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest): @classmethod def setup_clients(cls): super(BaseRbacTest, cls).setup_clients() cls.hosts_client = cls.os_primary.hosts_client", "patrole_tempest_plugin import rbac_exceptions CONF = config.CONF LOG = logging.getLogger(__name__) class", "logging from oslo_utils import excutils from tempest import config from", "a resource created before calling the ``override_role_and_validate_list`` function. 
:return: py:class:`_ValidateListContext`", "used by ``os_primary`` credentials to: * ``[patrole] rbac_test_roles`` before test", "self.__override_role_caught_exc = True def _validate_override_role_called(self): \"\"\"Idempotently validate that ``override_role`` is", "cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] all_role_ids = [role['id'] for role in roles]", "self.override_role_and_validate_list( admin_resource_id=admin_resource_id) as ctx: # the list of resources available", "= {} for role_id in data: res[role_id] = process_roles(role_id, data)", "def _validate_override_role_caught_exc(self): \"\"\"Idempotently validate that exception was caught inside ``override_role``,", "express or implied. See the # License for the specific", "do soft authorization: partial or empty response bodies are returned", "raise lib_exc.InvalidConfiguration( \"Patrole role overriding only supports v3 identity API.\")", "[] roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop once CONF.patrole.rbac_test_role is", "the Apache License, Version 2.0 (the \"License\"); you may #", "# Shows if exception raised during override_role. self.__override_role_caught_exc = False", "override_role. self.__override_role_caught_exc = False _admin_role_id = None _rbac_role_ids = None", "( self.ntp_client.create_dscp_marking_rule() [\"dscp_marking_rule\"][\"id']) with self.override_role_and_validate_list( admin_resource_id=admin_resource_id) as ctx: # the", "* ``[identity] admin_role`` after test execution Automatically switches to admin", "self.__override_role_caught_exc self.__override_role_caught_exc = False return caught_exception def is_admin(): \"\"\"Verifies whether", "for overriding ``os_primary`` Tempest credentials. :param toggle_rbac_role: Boolean value that", "\"Member\" role, then we must delete the \"admin\" role. Thus,", "here # to avoid over-permission errors: if the current list", "variable. 
Example:: with self.override_role_and_validate_list(...) as ctx: ctx.resources = list_function() \"\"\"", "result of role # overriding. This will optimize test runtime", "self._validate_func = self._validate_resource else: raise rbac_exceptions.RbacValidateListException( reason=\"admin_resources and admin_resource_id are", "cycle between roles defined by ``CONF.identity.admin_role`` and ``CONF.patrole.rbac_test_roles``. \"\"\" credentials", "return res raw_data = cls.admin_roles_client.list_all_role_inference_rules() data = convert_data(raw_data['role_inferences']) res =", "admin resources cannot be empty\") self._validate_func = self._validate_len elif admin_resource_id", "k, v in cls._role_map.items()}) @classmethod def _create_user_role_on_project(cls, role_ids): for role_id", "credentials. * If True: role is set to ``[patrole] rbac_test_role``", "\"\"\" credentials = ['primary', 'admin'] def __init__(self, *args, **kwargs): super(RbacUtilsMixin,", "This class is responsible for overriding the value of the", "admin_resource_id=None): \"\"\"Call ``override_role`` and validate RBAC for a list API", "less than admin resources. \"\"\" if not len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody()", "role_name in roles if role_name not in cls._role_map] msg +=", "class setup for a base RBAC class. Child classes should", "cls._rbac_role_ids = rbac_role_ids # Adding backward mapping cls._role_map.update({v: k for", "else: raise lib_exc.InvalidConfiguration( \"Patrole role overriding only supports v3 identity", "role, then we must delete the \"admin\" role. Thus, we", "# test. Automatically switch back to the admin role for", "False: role is set to ``[identity] admin_role`` \"\"\" LOG.debug('Overriding role", "auth_providers: provider.clear_auth() # Fernet tokens are not subsecond aware so", "admin_role`` after test execution Automatically switches to admin role after", "to safely clean up after this function. 
Example:: @rbac_rule_validation.action(service='test', rules=['a:test:rule'])", "and ``admin_resource_id`` are set or unset. \"\"\" self.resources = None", "class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest): @classmethod def setup_clients(cls): super(BaseRbacTest, cls).setup_clients() cls.hosts_client =", "and reset its value to False for sequential tests. \"\"\"", "= ['primary', 'admin'] def __init__(self, *args, **kwargs): super(RbacUtilsMixin, self).__init__(*args, **kwargs)", "alongside an instance of :py:class:`tempest.test.BaseTestCase` to perform Patrole class setup", "base RBAC class. Child classes should not use this mixin.", "token revocation occurred as a result of role # overriding.", "class responsible for validation of the list functions. This class", "if cls._orig_roles: LOG.info(\"Restoring original roles %s\", cls._orig_roles) roles_already_present = cls._list_and_clear_user_roles_on_project(", "whether exception was thrown inside ``override_role``. \"\"\" self.__override_role_caught_exc = True", "of ``os_primary`` credentials. 
* If True: role is set to", "reason=\"the list of admin resources cannot be empty\") self._validate_func =", "elimination, it can be determined whether one was thrown outside", "= self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)[\"dscp_marking_rules\"] \"\"\" ctx = _ValidateListContext(admin_resources, admin_resource_id) with self.override_role():", "raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self): \"\"\"Validates that the admin resource is", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "Making query to `list-all-role-inference-rules`_ keystone API returns all inference rules,", "_set_override_role_caught_exc(self): \"\"\"Helper for tracking whether exception was thrown inside ``override_role``.", "was_called def _validate_override_role_caught_exc(self): \"\"\"Idempotently validate that exception was caught inside", "and clean up, and primary credentials, needed to perform the", "of exceptions. This helper validates that unauthorized roles only return", "= self._validate_len elif admin_resource_id and admin_resources is None: self._admin_resource_id =", "roles_already_present = False try: target_roles = (cls._rbac_role_ids if toggle_rbac_role else", "Patrole class setup for a base RBAC class. Child classes", "= convert_data(raw_data['role_inferences']) res = {} for role_id in data: res[role_id]", "the proper validation function. :raises RbacValidateListException: if the ``ctx.resources`` variable", "= {} for rule in data: prior_role = rule['prior_role']['id'] implies", "roles: %s; Base roles: %s', res, roles) return list(res) @contextlib.contextmanager", "point in the test is not executed. 
\"\"\" self._set_override_role_called() self._override_role(True)", "cannot be empty\") self._validate_func = self._validate_len elif admin_resource_id and admin_resources", "[\"reader\"], \"1\": [\"2\", \"3\"] # \"admin\": [\"member\", \"reader\"] } ..", "# The role is switched back to admin automatically. Note", "the test is not executed. \"\"\" self._set_override_role_called() self._override_role(True) try: #", "License for the specific language governing permissions and limitations #", "with self.override_role_and_validate_list(...) as ctx: ctx.resources = list_function() \"\"\" def __init__(self,", "msg = (\"Could not find `[patrole] rbac_test_roles` or \" \"`[identity]", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "inference rules, which makes it possible to prepare roles mapping.", "msg += \" Available roles: %s.\" % \", \".join(cls._role_map) raise", "role ctx.resources = self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)[\"dscp_marking_rules\"] \"\"\" ctx = _ValidateListContext(admin_resources, admin_resource_id)", "Rights Reserved. # # Licensed under the Apache License, Version", "second boundary before attempting to authenticate. # Only sleep if", "import sys import time from oslo_log import log as logging", "self.__override_role_caught_exc = False return caught_exception def is_admin(): \"\"\"Verifies whether the", "# this block, call the API endpoint that enforces the", "are # passing the second boundary before attempting to authenticate.", "is possible to seamlessly swap between admin credentials, needed for", "cls.os_primary.credentials.tenant_id cls._user_id = cls.os_primary.credentials.user_id cls._role_inferences_mapping = cls._prepare_role_inferences_mapping() cls._init_roles() # Store", "# to avoid over-permission errors: if the current list of", "are not subsecond aware so sleep to ensure we are", "for ``ValidateListContext``. 
Either ``admin_resources`` or ``admin_resource_id`` should be used, not", "list of admin resources cannot be empty\") self._validate_func = self._validate_len", "within test. Tests may redefine this method to include their", "original roles and rollback after testing. roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id,", "test. Automatically switch back to the admin role for test", "inside ``override_role``, so that, by process of elimination, it can", "validation. exc = sys.exc_info()[0] if exc is not None: self._set_override_role_caught_exc()", "\"\"\"Validates that the number of resources is less than admin", "do not use ``role_id in all_role_ids`` here # to avoid", "all_role_ids = [role['id'] for role in roles] # NOTE(felipemonteiro): We", "= [role['id'] for role in roles] cls.addClassResourceCleanup(cls.restore_roles) # Change default", "can alter user roles for pre-provisioned credentials. Work is underway", "admin automatically. Note that # if the API call above", "and rollback after testing. roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] cls._orig_roles", "not in cls._role_map] msg += \" Following roles were not", "override_role was called. self.__override_role_called = False # Shows if exception", "auth_providers: provider.set_auth() @classmethod def _init_roles(cls): available_roles = cls.admin_roles_client.list_roles()['roles'] cls._role_map =", "the # License for the specific language governing permissions and", "\"\"\" self._set_override_role_called() self._override_role(True) try: # Execute the test. 
yield finally:", "raise rbac_exceptions.RbacValidateListException( reason=\"the list of admin resources cannot be empty\")", "= set(r for r in roles) for role in res.copy():", "[cls.os_primary.auth_provider] def _set_override_role_called(self): \"\"\"Helper for tracking whether ``override_role`` was called.\"\"\"", "res = set(r for r in roles) for role in", "the list of resources available for member role ctx.resources =", "cls._list_and_clear_user_roles_on_project( target_roles) # Do not override roles if `target_role` already", "once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: if not roles: roles.append(CONF.patrole.rbac_test_role)", "for setup and clean up, and primary credentials, needed to", "``rbac_test_roles`` contain the admin role. \"\"\" roles = CONF.patrole.rbac_test_roles #", "Note that # if the API call above threw an", "tempest.lib import exceptions as lib_exc from patrole_tempest_plugin import rbac_exceptions CONF", "= cls.os_primary.credentials.tenant_id cls._user_id = cls.os_primary.credentials.user_id cls._role_inferences_mapping = cls._prepare_role_inferences_mapping() cls._init_roles() #", "so sleep to ensure we are # passing the second", "def _override_role(cls, toggle_rbac_role=False): \"\"\"Private helper for overriding ``os_primary`` Tempest credentials.", "def _prepare_role_inferences_mapping(cls): \"\"\"Preparing roles mapping to support role inferences Making", "= {self.__class__._role_map[rid] for rid in implied_roles} res.update(role_names) LOG.debug('All needed roles:", "roles only return a subset of the available resources. Should", "exception was thrown inside ``override_role``. \"\"\" self.__override_role_caught_exc = True def", "not find `[patrole] rbac_test_roles` or \" \"`[identity] admin_role`, both of", "the available resources. 
Should only be used for validating list", "not found: %s.\" % ( \", \".join(missing_roles)) msg += \"", "\"implies\": [{\"id\": \"3\", \"name\": \"reader\"}], \"prior_role\": {\"id\": \"2\", \"name\": \"member\"}", "passing the second boundary before attempting to authenticate. # Only", "only return a subset of the available resources. Should only", "unauthorized roles only return a subset of the available resources.", "cls.admin_roles_client.list_all_role_inference_rules() data = convert_data(raw_data['role_inferences']) res = {} for role_id in", "Exception as exp: with excutils.save_and_reraise_exception(): LOG.exception(exp) finally: auth_providers = cls.get_auth_providers()", "cls._create_user_role_on_project(cls._orig_roles) @classmethod def setup_clients(cls): if CONF.identity_feature_enabled.api_v3: admin_roles_client = cls.os_admin.roles_v3_client else:", "assigned to the ``ctx.resources`` variable. Example:: with self.override_role_and_validate_list(...) as ctx:", "\"reader\"] } .. _list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules \"\"\" # noqa: E501 def", "Examples:: [\"admin\"] >> [\"admin\", \"member\", \"reader\"] [\"member\"] >> [\"member\", \"reader\"]", "auth_providers = cls.get_auth_providers() for provider in auth_providers: provider.clear_auth() # Fernet", "Change default role to admin cls._override_role(False) super(RbacUtilsMixin, cls).setup_clients() @classmethod def", "the role used by ``os_primary`` Tempest credentials. Temporarily change the", "mapping:: { \"2\": [\"3\"], # \"member\": [\"reader\"], \"1\": [\"2\", \"3\"]", "This code block is always executed, no matter the result", "admin credentials, needed for setup and clean up, and primary", "thrown outside (which is invalid). 
\"\"\" caught_exception = self.__override_role_caught_exc self.__override_role_caught_exc", "LOG.debug('All needed roles: %s; Base roles: %s', res, roles) return", "not assigned\") self._validate_func() class RbacUtilsMixin(object): \"\"\"Utility mixin responsible for switching", "an instance of :py:class:`tempest.test.BaseTestCase` to perform Patrole class setup for", "\"member\"}], \"prior_role\": {\"id\": \"1\", \"name\": \"admin\"} } ] } and", "the primary Tempest credential's role (i.e. ``os_primary`` role). By doing", "the project are an exact match. if set(role_ids) == set(all_role_ids):", "for \" \"RBAC testing.\") if not admin_role_id: missing_roles.append(CONF.identity.admin_role) if not", "early if the user's roles on the project are an", "the role that overrides default role of ``os_primary`` credentials. *", "res def get_all_needed_roles(self, roles): \"\"\"Extending given roles with roles from", "= {r['name']: r['id'] for r in available_roles} LOG.debug('Available roles: %s',", "% ( \", \".join(missing_roles)) msg += \" Available roles: %s.\"", "toggle_rbac_role: Boolean value that controls the role that overrides default", "own or third party client auth_providers. \"\"\" return [cls.os_primary.auth_provider] def", "helper validates that unauthorized roles only return a subset of", "Tests may redefine this method to include their own or", "for role_name in roles: rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id = cls._role_map.get(CONF.identity.admin_role) if not", "CONF.identity_feature_enabled.api_v3: admin_roles_client = cls.os_admin.roles_v3_client else: raise lib_exc.InvalidConfiguration( \"Patrole role overriding", "toggle_rbac_role=False): \"\"\"Private helper for overriding ``os_primary`` Tempest credentials. :param toggle_rbac_role:", "= cls.os_primary.credentials.user_id cls._role_inferences_mapping = cls._prepare_role_inferences_mapping() cls._init_roles() # Store the user's", "# \"admin\": [\"member\", \"reader\"] } .. 
_list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules \"\"\" #", "``override_role_and_validate_list`` function. :return: py:class:`_ValidateListContext` object. Example:: # the resource created", "must be assigned to the ``ctx.resources`` variable. Example:: with self.override_role_and_validate_list(...)", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "that # for future validation. exc = sys.exc_info()[0] if exc", "list functions. This class is used in ``override_role_and_validate_list`` function and", "rule in data: prior_role = rule['prior_role']['id'] implies = {r['id'] for", "list of resources received before calling the ``override_role_and_validate_list`` function. To", "License. import contextlib import sys import time from oslo_log import", "above threw an exception, any code below this # point", "you may # not use this file except in compliance", "base.BaseV2ComputeTest): @classmethod def setup_clients(cls): super(BaseRbacTest, cls).setup_clients() cls.hosts_client = cls.os_primary.hosts_client ...", "so, it is possible to seamlessly swap between admin credentials,", "toggle_rbac_role) roles_already_present = False try: target_roles = (cls._rbac_role_ids if toggle_rbac_role", "mixin responsible for switching ``os_primary`` role. Should be used as", "\"\"\"Idempotently validate that ``override_role`` is called and reset its value", "supports v3 identity API.\") cls.admin_roles_client = admin_roles_client cls._project_id = cls.os_primary.credentials.tenant_id", "roles defined by ``CONF.identity.admin_role`` and ``CONF.patrole.rbac_test_roles``. 
\"\"\" credentials = ['primary',", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "role_id) @classmethod def _list_and_clear_user_roles_on_project(cls, role_ids): roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles']", "self).__init__(*args, **kwargs) # Shows if override_role was called. self.__override_role_called =", "\"\"\"Helper for tracking whether exception was thrown inside ``override_role``. \"\"\"", "primary Tempest credential's role (i.e. ``os_primary`` role). By doing so,", "redefine this method to include their own or third party", "role equals the admin role. :returns: True if ``rbac_test_roles`` contain", "role_name not in cls._role_map] msg += \" Following roles were", "in self.resources: if resource['id'] == self._admin_resource_id: return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def", "role after test execution. :returns: None .. warning:: This function", "after this function. Example:: @rbac_rule_validation.action(service='test', rules=['a:test:rule']) def test_foo(self): # Allocate", "thrown inside ``override_role``. \"\"\" self.__override_role_caught_exc = True def _validate_override_role_called(self): \"\"\"Idempotently", "enforcement. The primary credentials always cycle between roles defined by", "the test. 
yield finally: # Check whether an exception was", "rbac_role_ids = [] roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop once", "in roles: rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id = cls._role_map.get(CONF.identity.admin_role) if not all([admin_role_id, all(rbac_role_ids)]):", "if the API call above threw an exception, any code", "in available_roles} LOG.debug('Available roles: %s', cls._role_map.keys()) rbac_role_ids = [] roles", "for the specific language governing permissions and limitations # under", "%s.\" % \", \".join(cls._role_map) raise rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id = admin_role_id cls._rbac_role_ids", "Do not override roles if `target_role` already exists. if not", "ensure we are # passing the second boundary before attempting", "the ``_validate_resource`` function. :raises RbacValidateListException: if both ``admin_resources`` and ``admin_resource_id``", "this block, call the API endpoint that enforces the #", "\"member\", \"reader\"] [\"member\"] >> [\"member\", \"reader\"] [\"reader\"] >> [\"reader\"] [\"custom_role\"]", "= cls._role_map.get(CONF.identity.admin_role) if not all([admin_role_id, all(rbac_role_ids)]): missing_roles = [] msg", "for k, v in cls._role_map.items()}) @classmethod def _create_user_role_on_project(cls, role_ids): for", "to False for sequential tests. 
\"\"\" was_called = self.__override_role_called self.__override_role_called", "``override_role`` was called.\"\"\" self.__override_role_called = True def _set_override_role_caught_exc(self): \"\"\"Helper for", "resource['id'] == self._admin_resource_id: return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self): \"\"\"Calls the", "config.CONF LOG = logging.getLogger(__name__) class _ValidateListContext(object): \"\"\"Context class responsible for", "to admin cls._override_role(False) super(RbacUtilsMixin, cls).setup_clients() @classmethod def _prepare_role_inferences_mapping(cls): \"\"\"Preparing roles", "validation function. :raises RbacValidateListException: if the ``ctx.resources`` variable is not", "`list-all-role-inference-rules`_ keystone API returns all inference rules, which makes it", "cls.os_admin.roles_v3_client else: raise lib_exc.InvalidConfiguration( \"Patrole role overriding only supports v3", "raise rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id = admin_role_id cls._rbac_role_ids = rbac_role_ids # Adding", "be used as a mixin class alongside an instance of", "to seamlessly swap between admin credentials, needed for setup and", "self._admin_len = len(admin_resources) if not self._admin_len: raise rbac_exceptions.RbacValidateListException( reason=\"the list", "that ``override_role`` is called and reset its value to False", "cls).setup_clients() cls.hosts_client = cls.os_primary.hosts_client ... This class is responsible for", "\"\"\"Preparing roles mapping to support role inferences Making query to", "# return early if the user's roles on the project", "\"\"\"Override the role used by ``os_primary`` Tempest credentials. Temporarily change", "provider in auth_providers: provider.set_auth() @classmethod def _init_roles(cls): available_roles = cls.admin_roles_client.list_roles()['roles']", "than admin resources. 
\"\"\" if not len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody() elif", "LOG.debug('Overriding role to: %s.', toggle_rbac_role) roles_already_present = False try: target_roles", "only # return early if the user's roles on the", "test_foo(self): # Allocate test-level resources here. with self.override_role(): # The", "``os_primary`` credentials to: * ``[patrole] rbac_test_roles`` before test execution *", "RbacValidateListException: if the ``ctx.resources`` variable is not assigned. \"\"\" if", "before attempting to authenticate. # Only sleep if a token", "import time from oslo_log import log as logging from oslo_utils", "list(res) @contextlib.contextmanager def override_role(self): \"\"\"Override the role used by ``os_primary``", "By doing so, it is possible to seamlessly swap between", "to ``[patrole] rbac_test_role`` * If False: role is set to", "we are switching to the # \"Member\" role, then we", "CONF.patrole.rbac_test_roles # TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role:", "set to ``[identity] admin_role`` \"\"\" LOG.debug('Overriding role to: %s.', toggle_rbac_role)", "role that overrides default role of ``os_primary`` credentials. * If", "if not roles_already_present: cls._create_user_role_on_project(target_roles) except Exception as exp: with excutils.save_and_reraise_exception():", "for provider in auth_providers: provider.set_auth() @classmethod def _init_roles(cls): available_roles =", "role). By doing so, it is possible to seamlessly swap", "for validating list API actions. :param test_obj: Instance of ``tempest.test.BaseTestCase``.", "target_roles) # Do not override roles if `target_role` already exists.", "of resources available for member role ctx.resources = self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)[\"dscp_marking_rules\"]", "== ``[patrole] rbac_test_roles``. 
if not roles_already_present: time.sleep(1) for provider in", "data): roles = data.get(role_id, set()) for rid in roles.copy(): roles.update(process_roles(rid,", "all(rbac_role_ids): missing_roles += [role_name for role_name in roles if role_name", "\" Following roles were not found: %s.\" % ( \",", "``override_role_and_validate_list`` function. To validate will be used the ``_validate_len`` function.", "up. self._override_role(False) @classmethod def _override_role(cls, toggle_rbac_role=False): \"\"\"Private helper for overriding", "We do not use ``role_id in all_role_ids`` here # to", "for role in roles] # NOTE(felipemonteiro): We do not use", "data:: {\"role_inferences\": [ { \"implies\": [{\"id\": \"3\", \"name\": \"reader\"}], \"prior_role\":", "mixin. Example:: class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest): @classmethod def setup_clients(cls): super(BaseRbacTest, cls).setup_clients()", "return roles def convert_data(data): res = {} for rule in", "class is used in ``override_role_and_validate_list`` function and the result of", "an exact match. if set(role_ids) == set(all_role_ids): return True for", "False return was_called def _validate_override_role_caught_exc(self): \"\"\"Idempotently validate that exception was", "expected policy specified by \"rule\" in the decorator. self.foo_service.bar_api_call() #", "\"1\": [\"2\", \"3\"] # \"admin\": [\"member\", \"reader\"] } .. _list-all-role-inference-rules:", "# Fernet tokens are not subsecond aware so sleep to", ".. warning:: This function can alter user roles for pre-provisioned", "You may obtain # a copy of the License at", "roles.update(process_roles(rid, data)) return roles def convert_data(data): res = {} for", "a resource created before calling the ``override_role_and_validate_list`` function. To validate", "``[patrole] rbac_test_role`` * If False: role is set to ``[identity]", "resource created before calling the ``override_role_and_validate_list`` function. 
:return: py:class:`_ValidateListContext` object.", "clean up, and primary credentials, needed to perform the API", "\".join(missing_roles)) msg += \" Available roles: %s.\" % \", \".join(cls._role_map)", "clean up after this function. Example:: @rbac_rule_validation.action(service='test', rules=['a:test:rule']) def test_foo(self):", "any code below this # point in the test is", "finally: # Check whether an exception was raised. If so,", "This function can alter user roles for pre-provisioned credentials. Work", "only be used for validating list API actions. :param test_obj:", "be empty\") self._validate_func = self._validate_len elif admin_resource_id and admin_resources is", "= rule['prior_role']['id'] implies = {r['id'] for r in rule['implies']} res[prior_role]", "Automatically switches to admin role after test execution. :returns: None", "function must be assigned to the ``ctx.resources`` variable. Example:: with", "v in cls._role_map.items()}) @classmethod def _create_user_role_on_project(cls, role_ids): for role_id in", "# \"member\": [\"reader\"], \"1\": [\"2\", \"3\"] # \"admin\": [\"member\", \"reader\"]", "self._admin_len > len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self): \"\"\"Validates that the", "\"\"\"Returns list of auth_providers used within test. 
Tests may redefine", "def setup_clients(cls): if CONF.identity_feature_enabled.api_v3: admin_roles_client = cls.os_admin.roles_v3_client else: raise lib_exc.InvalidConfiguration(", "admin admin_resource_id = ( self.ntp_client.create_dscp_marking_rule() [\"dscp_marking_rule\"][\"id']) with self.override_role_and_validate_list( admin_resource_id=admin_resource_id) as", "is called and reset its value to False for sequential", "data)) return roles def convert_data(data): res = {} for rule", "@classmethod def _list_and_clear_user_roles_on_project(cls, role_ids): roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] all_role_ids", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "Example:: # the resource created by admin admin_resource_id = (", "to: * ``[patrole] rbac_test_roles`` before test execution * ``[identity] admin_role``", "roles on the # project includes \"admin\" and \"Member\", and", "perform Patrole class setup for a base RBAC class. Child", "not executed. \"\"\" self._set_override_role_called() self._override_role(True) try: # Execute the test.", "_validate_override_role_caught_exc(self): \"\"\"Idempotently validate that exception was caught inside ``override_role``, so", "r in available_roles} LOG.debug('Available roles: %s', cls._role_map.keys()) rbac_role_ids = []", "_ValidateListContext(object): \"\"\"Context class responsible for validation of the list functions.", "exception was caught inside ``override_role``, so that, by process of", "return early if the user's roles on the project are", "cls._list_and_clear_user_roles_on_project( cls._orig_roles) if not roles_already_present: cls._create_user_role_on_project(cls._orig_roles) @classmethod def setup_clients(cls): if", "Instance of ``tempest.test.BaseTestCase``. 
:param list admin_resources: The list of resources", "= cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] all_role_ids = [role['id'] for role in", "validating list API actions. :param test_obj: Instance of ``tempest.test.BaseTestCase``. :param", "admin_resource_id self._validate_func = self._validate_resource else: raise rbac_exceptions.RbacValidateListException( reason=\"admin_resources and admin_resource_id", "that # if the API call above threw an exception,", "for future validation. exc = sys.exc_info()[0] if exc is not", "\"name\": \"reader\"}], \"prior_role\": {\"id\": \"2\", \"name\": \"member\"} }, { \"implies\":", "available_roles = cls.admin_roles_client.list_roles()['roles'] cls._role_map = {r['name']: r['id'] for r in", "with excutils.save_and_reraise_exception(): LOG.exception(exp) finally: auth_providers = cls.get_auth_providers() for provider in", "by \"rule\" in the decorator. self.foo_service.bar_api_call() # The role is", "the API call which does policy enforcement. The primary credentials", "will optimize test runtime in the case where # ``[identity]", "returned instead of exceptions. This helper validates that unauthorized roles", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "validates that unauthorized roles only return a subset of the", "all inference rules, which makes it possible to prepare roles", "more robust via a context is admin # lookup. return", "list API action. List actions usually do soft authorization: partial", "ctx ctx._validate() @classmethod def get_auth_providers(cls): \"\"\"Returns list of auth_providers used", "RBAC class. Child classes should not use this mixin. 
Example::", "by admin admin_resource_id = ( self.ntp_client.create_dscp_marking_rule() [\"dscp_marking_rule\"][\"id']) with self.override_role_and_validate_list( admin_resource_id=admin_resource_id)", "list admin_resources: The list of resources received before calling the", "admin_roles_client = None @classmethod def restore_roles(cls): if cls._orig_roles: LOG.info(\"Restoring original", "Corporation. # All Rights Reserved. # # Licensed under the", "if resource['id'] == self._admin_resource_id: return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self): \"\"\"Calls", "= config.CONF LOG = logging.getLogger(__name__) class _ValidateListContext(object): \"\"\"Context class responsible", "r in roles) for role in res.copy(): role_id = self.__class__._role_map.get(role)", "optimize test runtime in the case where # ``[identity] admin_role``", "\"\"\" roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop once CONF.patrole.rbac_test_role is", "rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id = admin_role_id cls._rbac_role_ids = rbac_role_ids # Adding backward", "needed to perform the API call which does policy enforcement.", "or implied. See the # License for the specific language", "missing_roles += [role_name for role_name in roles if role_name not", "use ``role_id in all_role_ids`` here # to avoid over-permission errors:", "_role_map = None _role_inferences_mapping = None _orig_roles = [] admin_roles_client", "mixin class alongside an instance of :py:class:`tempest.test.BaseTestCase` to perform Patrole", "# project includes \"admin\" and \"Member\", and we are switching", "ctx: # the list of resources available for member role", "+= [role_name for role_name in roles if role_name not in", "resources cannot be empty\") self._validate_func = self._validate_len elif admin_resource_id and", "equals the admin role. :returns: True if ``rbac_test_roles`` contain the", "perform the API call which does policy enforcement. 
The primary", "\"implies\": [{\"id\": \"2\", \"name\": \"member\"}], \"prior_role\": {\"id\": \"1\", \"name\": \"admin\"}", "provider in auth_providers: provider.clear_auth() # Fernet tokens are not subsecond", "role. Thus, we only # return early if the user's", "of resources is less than admin resources. \"\"\" if not", "Thus, we only # return early if the user's roles", "to the # \"Member\" role, then we must delete the", "else: raise rbac_exceptions.RbacValidateListException( reason=\"admin_resources and admin_resource_id are mutually \" \"exclusive\")", "test_obj: Instance of ``tempest.test.BaseTestCase``. :param list admin_resources: The list of", "self.override_role_and_validate_list(...) as ctx: ctx.resources = list_function() \"\"\" def __init__(self, admin_resources=None,", "with self.override_role_and_validate_list( admin_resource_id=admin_resource_id) as ctx: # the list of resources", "test execution. :returns: None .. warning:: This function can alter", "@classmethod def setup_clients(cls): super(BaseRbacTest, cls).setup_clients() cls.hosts_client = cls.os_primary.hosts_client ... This", "called and reset its value to False for sequential tests.", "not all(rbac_role_ids): missing_roles += [role_name for role_name in roles if", "An ID of a resource created before calling the ``override_role_and_validate_list``", "the mapping:: { \"2\": [\"3\"], # \"member\": [\"reader\"], \"1\": [\"2\",", "``[patrole] rbac_test_roles``. if not roles_already_present: time.sleep(1) for provider in auth_providers:", "roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] cls._orig_roles = [role['id'] for role", "if not all([admin_role_id, all(rbac_role_ids)]): missing_roles = [] msg = (\"Could", "admin role. \"\"\" roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop once", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "= None _project_id = None _user_id = None _role_map =", "already exists. 
if not roles_already_present: cls._create_user_role_on_project(target_roles) except Exception as exp:", "admin_role_id: missing_roles.append(CONF.identity.admin_role) if not all(rbac_role_ids): missing_roles += [role_name for role_name", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "``[patrole] rbac_test_roles`` before test execution * ``[identity] admin_role`` after test", "are mutually \" \"exclusive\") def _validate_len(self): \"\"\"Validates that the number", "self.__class__._role_map.get(role) implied_roles = self.__class__._role_inferences_mapping.get( role_id, set()) role_names = {self.__class__._role_map[rid] for", "after test execution Automatically switches to admin role after test", "resources. Should only be used for validating list API actions.", "setup_clients(cls): if CONF.identity_feature_enabled.api_v3: admin_roles_client = cls.os_admin.roles_v3_client else: raise lib_exc.InvalidConfiguration( \"Patrole", "import rbac_exceptions CONF = config.CONF LOG = logging.getLogger(__name__) class _ValidateListContext(object):", "alter user roles for pre-provisioned credentials. Work is underway to", "cls._orig_roles) roles_already_present = cls._list_and_clear_user_roles_on_project( cls._orig_roles) if not roles_already_present: cls._create_user_role_on_project(cls._orig_roles) @classmethod", "credentials, needed for setup and clean up, and primary credentials,", "``role_id in all_role_ids`` here # to avoid over-permission errors: if", "if exc is not None: self._set_override_role_caught_exc() # This code block", "admin_resources: The list of resources received before calling the ``override_role_and_validate_list``", "not subsecond aware so sleep to ensure we are #", "exact match. if set(role_ids) == set(all_role_ids): return True for role", "cls._prepare_role_inferences_mapping() cls._init_roles() # Store the user's original roles and rollback", "# ``[identity] admin_role`` == ``[patrole] rbac_test_roles``. 
if not roles_already_present: time.sleep(1)", ":param UUID admin_resource_id: An ID of a resource created before", "function. :raises RbacValidateListException: if the ``ctx.resources`` variable is not assigned.", "= self.__override_role_caught_exc self.__override_role_caught_exc = False return caught_exception def is_admin(): \"\"\"Verifies", "self.override_role(): # The role for `os_primary` has now been overridden.", "@classmethod def _init_roles(cls): available_roles = cls.admin_roles_client.list_roles()['roles'] cls._role_map = {r['name']: r['id']", "auth_providers. \"\"\" return [cls.os_primary.auth_provider] def _set_override_role_called(self): \"\"\"Helper for tracking whether", "KIND, either express or implied. See the # License for", "calling the ``override_role_and_validate_list`` function. :param UUID admin_resource_id: An ID of", "both ``admin_resources`` and ``admin_resource_id`` are set or unset. \"\"\" self.resources", "overriding ``os_primary`` Tempest credentials. :param toggle_rbac_role: Boolean value that controls", "is invalid). \"\"\" caught_exception = self.__override_role_caught_exc self.__override_role_caught_exc = False return", "through the raw data:: {\"role_inferences\": [ { \"implies\": [{\"id\": \"3\",", "value that controls the role that overrides default role of", "up after this function. Example:: @rbac_rule_validation.action(service='test', rules=['a:test:rule']) def test_foo(self): #", "both of which are required for \" \"RBAC testing.\") if", "toggle_rbac_role else [cls._admin_role_id]) roles_already_present = cls._list_and_clear_user_roles_on_project( target_roles) # Do not", "the raw data:: {\"role_inferences\": [ { \"implies\": [{\"id\": \"3\", \"name\":", "function. To validate will be used the ``_validate_resource`` function. :raises", "resources received before calling the ``override_role_and_validate_list`` function. 
:param UUID admin_resource_id:", "rbac_role_ids # Adding backward mapping cls._role_map.update({v: k for k, v", "cls._user_id, role['id']) return False @contextlib.contextmanager def override_role_and_validate_list(self, admin_resources=None, admin_resource_id=None): \"\"\"Call", "The list of resources received before calling the ``override_role_and_validate_list`` function.", "support role inferences Making query to `list-all-role-inference-rules`_ keystone API returns", "if not roles_already_present: time.sleep(1) for provider in auth_providers: provider.set_auth() @classmethod", "before calling the ``override_role_and_validate_list`` function. :param UUID admin_resource_id: An ID", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "API endpoint that enforces the # expected policy specified by", "role. \"\"\" roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop once CONF.patrole.rbac_test_role", "and \"Member\", and we are switching to the # \"Member\"", "len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self): \"\"\"Validates that the admin resource", "True if ``rbac_test_roles`` contain the admin role. \"\"\" roles =", "authorization: partial or empty response bodies are returned instead of", "is not assigned\") self._validate_func() class RbacUtilsMixin(object): \"\"\"Utility mixin responsible for", "Check whether an exception was raised. If so, remember that", "CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role) roles = list(set(roles)) #", "of the # test. Automatically switch back to the admin", "if exception raised during override_role. self.__override_role_caught_exc = False _admin_role_id =", "implied. 
See the # License for the specific language governing", "= _ValidateListContext(admin_resources, admin_resource_id) with self.override_role(): yield ctx ctx._validate() @classmethod def", "This helper validates that unauthorized roles only return a subset", "mapping cls._role_map.update({v: k for k, v in cls._role_map.items()}) @classmethod def", "_prepare_role_inferences_mapping(cls): \"\"\"Preparing roles mapping to support role inferences Making query", "not roles_already_present: cls._create_user_role_on_project(cls._orig_roles) @classmethod def setup_clients(cls): if CONF.identity_feature_enabled.api_v3: admin_roles_client =", "Example:: class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest): @classmethod def setup_clients(cls): super(BaseRbacTest, cls).setup_clients() cls.hosts_client", "__init__(self, *args, **kwargs): super(RbacUtilsMixin, self).__init__(*args, **kwargs) # Shows if override_role", "to prepare roles mapping. It walks recursively through the raw", "before test execution * ``[identity] admin_role`` after test execution Automatically", "setup for a base RBAC class. Child classes should not", "on the # project includes \"admin\" and \"Member\", and we", "missing_roles = [] msg = (\"Could not find `[patrole] rbac_test_roles`", "] } and converts it to the mapping:: { \"2\":", "to the mapping:: { \"2\": [\"3\"], # \"member\": [\"reader\"], \"1\":", "be used the ``_validate_len`` function. :param UUID admin_resource_id: An ID", "admin_roles_client = cls.os_admin.roles_v3_client else: raise lib_exc.InvalidConfiguration( \"Patrole role overriding only", "# up. 
self._override_role(False) @classmethod def _override_role(cls, toggle_rbac_role=False): \"\"\"Private helper for", "with self.override_role(): yield ctx ctx._validate() @classmethod def get_auth_providers(cls): \"\"\"Returns list", "res, roles) return list(res) @contextlib.contextmanager def override_role(self): \"\"\"Override the role", "validate that exception was caught inside ``override_role``, so that, by", "False return caught_exception def is_admin(): \"\"\"Verifies whether the current test", "always cycle between roles defined by ``CONF.identity.admin_role`` and ``CONF.patrole.rbac_test_roles``. \"\"\"", "[ { \"implies\": [{\"id\": \"3\", \"name\": \"reader\"}], \"prior_role\": {\"id\": \"2\",", "admin_resources is None: self._admin_resource_id = admin_resource_id self._validate_func = self._validate_resource else:", "sleep to ensure we are # passing the second boundary", "credentials. :param toggle_rbac_role: Boolean value that controls the role that", "so that, by process of elimination, it can be determined", "roles mapping to support role inferences Making query to `list-all-role-inference-rules`_", "If False: role is set to ``[identity] admin_role`` \"\"\" LOG.debug('Overriding", "bodies are returned instead of exceptions. This helper validates that", "@classmethod def _create_user_role_on_project(cls, role_ids): for role_id in role_ids: cls.admin_roles_client.create_user_role_on_project( cls._project_id,", "mutually \" \"exclusive\") def _validate_len(self): \"\"\"Validates that the number of", "tokens are not subsecond aware so sleep to ensure we", "\"3\"] # \"admin\": [\"member\", \"reader\"] } .. 
_list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules \"\"\"", "obtain # a copy of the License at # #", "_validate_len(self): \"\"\"Validates that the number of resources is less than", "that, by process of elimination, it can be determined whether", "TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role) roles", ".. _list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules \"\"\" # noqa: E501 def process_roles(role_id, data):", "set(all_role_ids): return True for role in roles: cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id, cls._user_id,", "= None _orig_roles = [] admin_roles_client = None @classmethod def", "Available roles: %s.\" % \", \".join(cls._role_map) raise rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id =", "(\"Could not find `[patrole] rbac_test_roles` or \" \"`[identity] admin_role`, both", "be used, not both. :param list admin_resources: The list of", "def _validate_resource(self): \"\"\"Validates that the admin resource is present in", "possible to seamlessly swap between admin credentials, needed for setup", "def _set_override_role_caught_exc(self): \"\"\"Helper for tracking whether exception was thrown inside", "def get_auth_providers(cls): \"\"\"Returns list of auth_providers used within test. Tests", "role used by ``os_primary`` Tempest credentials. 
Temporarily change the role", "as logging from oslo_utils import excutils from tempest import config", "a list function must be assigned to the ``ctx.resources`` variable.", "= self._validate_resource else: raise rbac_exceptions.RbacValidateListException( reason=\"admin_resources and admin_resource_id are mutually", "rbac_exceptions.RbacValidateListException( reason=\"admin_resources and admin_resource_id are mutually \" \"exclusive\") def _validate_len(self):", "[cls._admin_role_id]) roles_already_present = cls._list_and_clear_user_roles_on_project( target_roles) # Do not override roles", "%s\", cls._orig_roles) roles_already_present = cls._list_and_clear_user_roles_on_project( cls._orig_roles) if not roles_already_present: cls._create_user_role_on_project(cls._orig_roles)", "recursively through the raw data:: {\"role_inferences\": [ { \"implies\": [{\"id\":", "roles and rollback after testing. roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles']", "# Store the user's original roles and rollback after testing.", "execution * ``[identity] admin_role`` after test execution Automatically switches to", "in roles] cls.addClassResourceCleanup(cls.restore_roles) # Change default role to admin cls._override_role(False)", "for role_name in roles if role_name not in cls._role_map] msg", "only supports v3 identity API.\") cls.admin_roles_client = admin_roles_client cls._project_id =", "\"Member\", and we are switching to the # \"Member\" role,", "self.__override_role_called = False return was_called def _validate_override_role_caught_exc(self): \"\"\"Idempotently validate that", "received before calling the ``override_role_and_validate_list`` function. :param UUID admin_resource_id: An", "self.__override_role_called = False # Shows if exception raised during override_role.", "prepare roles mapping. It walks recursively through the raw data::", "`os_primary` has now been overridden. 
Within # this block, call", "if admin_resources is not None and not admin_resource_id: self._admin_len =", "boundary before attempting to authenticate. # Only sleep if a", "r['id'] for r in available_roles} LOG.debug('Available roles: %s', cls._role_map.keys()) rbac_role_ids", "code block is always executed, no matter the result of", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "roles): \"\"\"Extending given roles with roles from mapping Examples:: [\"admin\"]", "self._set_override_role_called() self._override_role(True) try: # Execute the test. yield finally: #", "if role_name not in cls._role_map] msg += \" Following roles", "for validation of the list functions. This class is used", "this mixin. Example:: class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest): @classmethod def setup_clients(cls): super(BaseRbacTest,", "try: target_roles = (cls._rbac_role_ids if toggle_rbac_role else [cls._admin_role_id]) roles_already_present =", "occurred as a result of role # overriding. This will", "always executed, no matter the result of the # test.", "roles def convert_data(data): res = {} for rule in data:", "role_id in data: res[role_id] = process_roles(role_id, data) return res def", "member role ctx.resources = self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)[\"dscp_marking_rules\"] \"\"\" ctx = _ValidateListContext(admin_resources,", "used by ``os_primary`` Tempest credentials. Temporarily change the role used", "automatically. Note that # if the API call above threw", "not admin_resource_id: self._admin_len = len(admin_resources) if not self._admin_len: raise rbac_exceptions.RbacValidateListException(", "Temporarily change the role used by ``os_primary`` credentials to: *", "whether one was thrown outside (which is invalid). \"\"\" caught_exception", "to authenticate. 
# Only sleep if a token revocation occurred", "raw_data = cls.admin_roles_client.list_all_role_inference_rules() data = convert_data(raw_data['role_inferences']) res = {} for", "if self.resources is None: raise rbac_exceptions.RbacValidateListException( reason=\"ctx.resources is not assigned\")", "the ``override_role_and_validate_list`` function. :param UUID admin_resource_id: An ID of a", "self.__class__._role_inferences_mapping.get( role_id, set()) role_names = {self.__class__._role_map[rid] for rid in implied_roles}", "roles if `target_role` already exists. if not roles_already_present: cls._create_user_role_on_project(target_roles) except", "[role['id'] for role in roles] cls.addClassResourceCleanup(cls.restore_roles) # Change default role", "# TODO(felipemonteiro): Make this more robust via a context is", "credentials = ['primary', 'admin'] def __init__(self, *args, **kwargs): super(RbacUtilsMixin, self).__init__(*args,", "[\"2\", \"3\"] # \"admin\": [\"member\", \"reader\"] } .. _list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules", "of the list functions. This class is used in ``override_role_and_validate_list``", "used the ``_validate_len`` function. :param UUID admin_resource_id: An ID of", "between roles defined by ``CONF.identity.admin_role`` and ``CONF.patrole.rbac_test_roles``. \"\"\" credentials =", "we are # passing the second boundary before attempting to", "return True for role in roles: cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id, cls._user_id, role['id'])", "2.0 (the \"License\"); you may # not use this file", "soft authorization: partial or empty response bodies are returned instead", "number of resources is less than admin resources. \"\"\" if", "are set or unset. 
\"\"\" self.resources = None if admin_resources", "set()) role_names = {self.__class__._role_map[rid] for rid in implied_roles} res.update(role_names) LOG.debug('All", "were not found: %s.\" % ( \", \".join(missing_roles)) msg +=", "not admin_role_id: missing_roles.append(CONF.identity.admin_role) if not all(rbac_role_ids): missing_roles += [role_name for", "roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed", "role is switched back to admin automatically. Note that #", "# Adding backward mapping cls._role_map.update({v: k for k, v in", "object. Example:: # the resource created by admin admin_resource_id =", "role for test clean # up. self._override_role(False) @classmethod def _override_role(cls,", "by applicable law or agreed to in writing, software #", "in roles if role_name not in cls._role_map] msg += \"", "of a resource created before calling the ``override_role_and_validate_list`` function. To", "Child classes should not use this mixin. Example:: class BaseRbacTest(rbac_utils.RbacUtilsMixin,", "a list API action. List actions usually do soft authorization:", "that the number of resources is less than admin resources.", "once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role) roles = list(set(roles))", "attempting to authenticate. # Only sleep if a token revocation", "the admin role. :returns: True if ``rbac_test_roles`` contain the admin", "not None and not admin_resource_id: self._admin_len = len(admin_resources) if not", "The role for `os_primary` has now been overridden. 
Within #", "elif admin_resource_id and admin_resources is None: self._admin_resource_id = admin_resource_id self._validate_func", "class _ValidateListContext(object): \"\"\"Context class responsible for validation of the list", "= False _admin_role_id = None _rbac_role_ids = None _project_id =", "in implied_roles} res.update(role_names) LOG.debug('All needed roles: %s; Base roles: %s',", "Make this more robust via a context is admin #", "was raised. If so, remember that # for future validation.", "role of ``os_primary`` credentials. * If True: role is set", "False _admin_role_id = None _rbac_role_ids = None _project_id = None", "role in roles: cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id, cls._user_id, role['id']) return False @contextlib.contextmanager", "True def _set_override_role_caught_exc(self): \"\"\"Helper for tracking whether exception was thrown", "# All Rights Reserved. # # Licensed under the Apache", "in data: res[role_id] = process_roles(role_id, data) return res def get_all_needed_roles(self,", "# TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: if", "\"\"\" self.__override_role_caught_exc = True def _validate_override_role_called(self): \"\"\"Idempotently validate that ``override_role``", "roles: %s', res, roles) return list(res) @contextlib.contextmanager def override_role(self): \"\"\"Override", "credential's role (i.e. ``os_primary`` role). By doing so, it is", "API returns all inference rules, which makes it possible to", "LOG.exception(exp) finally: auth_providers = cls.get_auth_providers() for provider in auth_providers: provider.clear_auth()", "target_roles = (cls._rbac_role_ids if toggle_rbac_role else [cls._admin_role_id]) roles_already_present = cls._list_and_clear_user_roles_on_project(", "roles on the project are an exact match. 
if set(role_ids)", "applicable law or agreed to in writing, software # distributed", "_user_id = None _role_map = None _role_inferences_mapping = None _orig_roles", "``admin_resources`` or ``admin_resource_id`` should be used, not both. :param list", "get_all_needed_roles(self, roles): \"\"\"Extending given roles with roles from mapping Examples::", "not len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len > len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources)", "to the admin role for test clean # up. self._override_role(False)", "except Exception as exp: with excutils.save_and_reraise_exception(): LOG.exception(exp) finally: auth_providers =", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "# under the License. import contextlib import sys import time", "in the case where # ``[identity] admin_role`` == ``[patrole] rbac_test_roles``.", "to: %s.', toggle_rbac_role) roles_already_present = False try: target_roles = (cls._rbac_role_ids", "= [] msg = (\"Could not find `[patrole] rbac_test_roles` or", "None .. warning:: This function can alter user roles for", "list of roles on the # project includes \"admin\" and", "data = convert_data(raw_data['role_inferences']) res = {} for role_id in data:", "roles = list(set(roles)) # TODO(felipemonteiro): Make this more robust via", "cls._role_map.items()}) @classmethod def _create_user_role_on_project(cls, role_ids): for role_id in role_ids: cls.admin_roles_client.create_user_role_on_project(", "__init__(self, admin_resources=None, admin_resource_id=None): \"\"\"Constructor for ``ValidateListContext``. Either ``admin_resources`` or ``admin_resource_id``", "cls._orig_roles) if not roles_already_present: cls._create_user_role_on_project(cls._orig_roles) @classmethod def setup_clients(cls): if CONF.identity_feature_enabled.api_v3:", "that controls the role that overrides default role of ``os_primary``", "action. 
List actions usually do soft authorization: partial or empty", "set or unset. \"\"\" self.resources = None if admin_resources is", "ctx = _ValidateListContext(admin_resources, admin_resource_id) with self.override_role(): yield ctx ctx._validate() @classmethod", "rbac_exceptions CONF = config.CONF LOG = logging.getLogger(__name__) class _ValidateListContext(object): \"\"\"Context", "mapping. It walks recursively through the raw data:: {\"role_inferences\": [", "tests. \"\"\" was_called = self.__override_role_called self.__override_role_called = False return was_called", "role in roles] # NOTE(felipemonteiro): We do not use ``role_id", "% \", \".join(cls._role_map) raise rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id = admin_role_id cls._rbac_role_ids =", "role_id, set()) role_names = {self.__class__._role_map[rid] for rid in implied_roles} res.update(role_names)", "= cls._list_and_clear_user_roles_on_project( cls._orig_roles) if not roles_already_present: cls._create_user_role_on_project(cls._orig_roles) @classmethod def setup_clients(cls):", "Tempest credentials. :param toggle_rbac_role: Boolean value that controls the role", "been overridden. Within # this block, call the API endpoint", "and ``CONF.patrole.rbac_test_roles``. \"\"\" credentials = ['primary', 'admin'] def __init__(self, *args,", "# License for the specific language governing permissions and limitations", "the second boundary before attempting to authenticate. # Only sleep", "party client auth_providers. \"\"\" return [cls.os_primary.auth_provider] def _set_override_role_called(self): \"\"\"Helper for", "be used the ``_validate_resource`` function. 
:raises RbacValidateListException: if both ``admin_resources``", "self._admin_resource_id = admin_resource_id self._validate_func = self._validate_resource else: raise rbac_exceptions.RbacValidateListException( reason=\"admin_resources", "[{\"id\": \"3\", \"name\": \"reader\"}], \"prior_role\": {\"id\": \"2\", \"name\": \"member\"} },", "cls._project_id, cls._user_id, role['id']) return False @contextlib.contextmanager def override_role_and_validate_list(self, admin_resources=None, admin_resource_id=None):", "This class is used in ``override_role_and_validate_list`` function and the result", "roles_already_present: cls._create_user_role_on_project(target_roles) except Exception as exp: with excutils.save_and_reraise_exception(): LOG.exception(exp) finally:", "\"admin\" role. Thus, we only # return early if the", "client auth_providers. \"\"\" return [cls.os_primary.auth_provider] def _set_override_role_called(self): \"\"\"Helper for tracking", "cls._role_map] msg += \" Following roles were not found: %s.\"", "role inferences Making query to `list-all-role-inference-rules`_ keystone API returns all", "test execution Automatically switches to admin role after test execution.", "and not admin_resource_id: self._admin_len = len(admin_resources) if not self._admin_len: raise", "cls._project_id, cls._user_id)['roles'] cls._orig_roles = [role['id'] for role in roles] cls.addClassResourceCleanup(cls.restore_roles)", "walks recursively through the raw data:: {\"role_inferences\": [ { \"implies\":", "class is responsible for overriding the value of the primary", "Should only be used for validating list API actions. :param", "log as logging from oslo_utils import excutils from tempest import", "False # Shows if exception raised during override_role. self.__override_role_caught_exc =", "of auth_providers used within test. Tests may redefine this method", "``override_role`` and validate RBAC for a list API action. 
List", "_ValidateListContext(admin_resources, admin_resource_id) with self.override_role(): yield ctx ctx._validate() @classmethod def get_auth_providers(cls):", "self._validate_func = self._validate_len elif admin_resource_id and admin_resources is None: self._admin_resource_id", "to perform Patrole class setup for a base RBAC class.", "whether ``override_role`` was called.\"\"\" self.__override_role_called = True def _set_override_role_caught_exc(self): \"\"\"Helper", "CONF = config.CONF LOG = logging.getLogger(__name__) class _ValidateListContext(object): \"\"\"Context class", "# Shows if override_role was called. self.__override_role_called = False #", "inside ``override_role``. \"\"\" self.__override_role_caught_exc = True def _validate_override_role_called(self): \"\"\"Idempotently validate", "language governing permissions and limitations # under the License. import", "self._validate_len elif admin_resource_id and admin_resources is None: self._admin_resource_id = admin_resource_id", "License. You may obtain # a copy of the License", "@classmethod def _override_role(cls, toggle_rbac_role=False): \"\"\"Private helper for overriding ``os_primary`` Tempest", "convert_data(data): res = {} for rule in data: prior_role =", "for resource in self.resources: if resource['id'] == self._admin_resource_id: return raise", "ANY KIND, either express or implied. See the # License", "role. Should be used as a mixin class alongside an", "{\"role_inferences\": [ { \"implies\": [{\"id\": \"3\", \"name\": \"reader\"}], \"prior_role\": {\"id\":", "that unauthorized roles only return a subset of the available", "None: self._admin_resource_id = admin_resource_id self._validate_func = self._validate_resource else: raise rbac_exceptions.RbacValidateListException(", "to support role inferences Making query to `list-all-role-inference-rules`_ keystone API", "on the project are an exact match. 
if set(role_ids) ==", "res.copy(): role_id = self.__class__._role_map.get(role) implied_roles = self.__class__._role_inferences_mapping.get( role_id, set()) role_names", "empty\") self._validate_func = self._validate_len elif admin_resource_id and admin_resources is None:", "def _list_and_clear_user_roles_on_project(cls, role_ids): roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] all_role_ids =", "makes it possible to prepare roles mapping. It walks recursively", "= sys.exc_info()[0] if exc is not None: self._set_override_role_caught_exc() # This", "return False @contextlib.contextmanager def override_role_and_validate_list(self, admin_resources=None, admin_resource_id=None): \"\"\"Call ``override_role`` and", "cls._create_user_role_on_project(target_roles) except Exception as exp: with excutils.save_and_reraise_exception(): LOG.exception(exp) finally: auth_providers", "CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: if not roles: roles.append(CONF.patrole.rbac_test_role) for", "cls._role_map.get(CONF.identity.admin_role) if not all([admin_role_id, all(rbac_role_ids)]): missing_roles = [] msg =", "or \" \"`[identity] admin_role`, both of which are required for", "excutils.save_and_reraise_exception(): LOG.exception(exp) finally: auth_providers = cls.get_auth_providers() for provider in auth_providers:", "cls.admin_roles_client.create_user_role_on_project( cls._project_id, cls._user_id, role_id) @classmethod def _list_and_clear_user_roles_on_project(cls, role_ids): roles =", "prior_role = rule['prior_role']['id'] implies = {r['id'] for r in rule['implies']}", "back to admin automatically. Note that # if the API", "exception, any code below this # point in the test", "class RbacUtilsMixin(object): \"\"\"Utility mixin responsible for switching ``os_primary`` role. Should" ]
[ "self.Dismiss() self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd() def KeyReturn(self): self.OnLeftDown(None) def onKeyPress(self, e): c", "parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc', width=200) self.lc.AppendColumn('Thành phần',", "enumerate(self.d_l): self.lc.Append( [item.name, item.element, item.quantity, item.sale_price, item.usage]) if item.quantity <=", "def SetStringValue(self, val): idx = self.lc.FindItem(-1, val) if idx !=", "self.onTextChange) self.SetHint(\"Nhấn Enter để search thuốc\") self._drugWH = None self.EnablePopupAnimation(enable=False)", "c == wx.WXK_DOWN: self.KeyDown() elif c == wx.WXK_UP: self.KeyUp() elif", "self.ComboCtrl.drugWH = self.d_l[self.value] self.Dismiss() self.ComboCtrl.SelectAll() self.ComboCtrl.SetInsertionPointEnd() except IndexError: self.Dismiss() def", "DrugPopup(wx.ComboPopup): def __init__(self, parent): super().__init__() self.lc = None self.mv =", "== \"posix\": if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]: if not self.IsPopupShown():", "idx != wx.NOT_FOUND: self.lc.Select(idx) def GetStringValue(self): if self.value >= 0:", "self.Dismiss() self.ComboCtrl.SelectAll() self.ComboCtrl.SetInsertionPointEnd() except IndexError: self.Dismiss() def OnPopup(self): self.Init() self.Update(self.ComboCtrl.Value)", "if not self.IsPopupShown(): self.Popup() self.SetInsertionPointEnd() if os.name == \"posix\": if", "self.lc = wx.ListCtrl( parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc',", "search thuốc\") self._drugWH = None self.EnablePopupAnimation(enable=False) @property def drugWH(self): return", "OnMotion(self, e): item, flags = self.lc.HitTest(e.GetPosition()) if item >= 0:", "để search thuốc\") self._drugWH = None self.EnablePopupAnimation(enable=False) @property def drugWH(self):", "self.ChangeValue(a) self.SetInsertionPointEnd() e.Skip() def onTextChange(self, e): if os.name == \"nt\":", "wx.WXK_DOWN, 
wx.WXK_ESCAPE]: if self.IsPopupShown(): a = self.Value self.Dismiss() self.ChangeValue(a) self.SetInsertionPointEnd()", "DrugPopup(self) self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Bind(wx.EVT_TEXT, self.onTextChange) self.SetHint(\"Nhấn Enter để search", "from initialize import * from core.db.db_func import query_linedrug_list import os", "KeyDown(self): if self.lc.ItemCount > 0: if self.curitem < (self.lc.ItemCount -", "self.Parent if dwh: pg.usage_unit.Label = dwh.usage_unit + \" \" pg.sale_unit.Label", "\"nt\": if e.String == \"\": self.Clear() elif len(e.String) >= 1:", "KeyUp(self): if self.lc.ItemCount > 0: if self.curitem > 0: self.curitem", "lượng') self.lc.AppendColumn('Đơn giá') self.lc.AppendColumn('Cách dùng', width=100) self.lc.Bind(wx.EVT_MOTION, self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)", "self.lc.AppendColumn('Đơn giá') self.lc.AppendColumn('Cách dùng', width=100) self.lc.Bind(wx.EVT_MOTION, self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN,", "None self.EnablePopupAnimation(enable=False) @property def drugWH(self): return self._drugWH @drugWH.setter def drugWH(self,", "self.d_l = [] def Create(self, parent): self.lc = wx.ListCtrl( parent,", "phần', width=150) self.lc.AppendColumn('Số lượng') self.lc.AppendColumn('Đơn giá') self.lc.AppendColumn('Cách dùng', width=100) self.lc.Bind(wx.EVT_MOTION,", "IndexError: self.Dismiss() def OnPopup(self): self.Init() self.Update(self.ComboCtrl.Value) if self.lc.ItemCount > 0:", "(self.lc.ItemCount - 1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyDown(self):", "flags = self.lc.HitTest(e.GetPosition()) if item >= 0: self.lc.Select(item) self.curitem =", "== wx.WXK_RETURN: self.KeyReturn() class DrugPicker(wx.ComboCtrl): def __init__(self, parent): super().__init__(parent, size=drugctrl_size,", "if self.curitem > 0: self.curitem -= 1 self.lc.Select(self.curitem) 
self.lc.EnsureVisible(self.curitem) else:", "if dwh: pg.usage_unit.Label = dwh.usage_unit + \" \" pg.sale_unit.Label =", "self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyUp(self): if self.lc.ItemCount > 0: if self.curitem", "not self.IsPopupShown(): self.Popup() self.SetInsertionPointEnd() if os.name == \"posix\": if e.String", "self.mv = parent.mv self.drug_popup = DrugPopup(self) self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Bind(wx.EVT_TEXT,", "self.ChangeValue('') pg.dosage_per.ChangeValue('') pg.usage_unit.Label = '{Đơn vị} ' pg.times.ChangeValue(\"\") pg.quantity.ChangeValue(\"\") pg.sale_unit.Label", "width=100) self.lc.Bind(wx.EVT_MOTION, self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Update() return True", "pg.dosage_per.ChangeValue('') pg.usage_unit.Label = '{Đơn vị} ' pg.times.ChangeValue(\"\") pg.quantity.ChangeValue(\"\") pg.sale_unit.Label =", "c = e.GetKeyCode() if c == wx.WXK_DOWN: self.KeyDown() elif c", "wx.WXK_ESCAPE]: if self.IsPopupShown(): a = self.Value self.Dismiss() self.ChangeValue(a) self.SetInsertionPointEnd() e.Skip()", "maxHeight): return super().GetAdjustedSize(*popup_size) def Update(self, s=''): self.lc.DeleteAllItems() self.d_l = list(filter(", "drugWH(self): return self._drugWH @drugWH.setter def drugWH(self, dwh): self._drugWH = dwh", "def drugWH(self, dwh): self._drugWH = dwh pg = self.Parent if", "parent): self.lc = wx.ListCtrl( parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER)", "OnLeftDown(self, e): try: self.value = self.curitem self.ComboCtrl.drugWH = self.d_l[self.value] self.Dismiss()", "else: e.Skip() else: if e.GetKeyCode() not in [wx.WXK_RETURN, wx.WXK_UP, wx.WXK_DOWN,", "def Clear(self): self.drugWH = None def refreshPopup(self): self.drug_popup.init_d_l = query_linedrug_list(self.mv.sess).all()", "def onTextChange(self, e): if os.name == \"nt\": if e.String ==", 
"e.String == \"\": self.Clear() elif len(e.String) >= 1: if not", "for index, item in enumerate(self.d_l): self.lc.Append( [item.name, item.element, item.quantity, item.sale_price,", "self.SetInsertionPointEnd() e.Skip() def onTextChange(self, e): if os.name == \"nt\": if", "minWidth, prefHeight, maxHeight): return super().GetAdjustedSize(*popup_size) def Update(self, s=''): self.lc.DeleteAllItems() self.d_l", ">= 0: self.lc.Select(item) self.curitem = item def OnLeftDown(self, e): try:", "if self.lc.ItemCount > 0: if self.curitem < (self.lc.ItemCount - 1):", "self.KeyESC() def KeyESC(self): a = self.ComboCtrl.Value self.Dismiss() self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd() def", "== \"\": self.Clear() elif len(e.String) >= 1: if not self.IsPopupShown():", "\" else: self.ChangeValue('') pg.dosage_per.ChangeValue('') pg.usage_unit.Label = '{Đơn vị} ' pg.times.ChangeValue(\"\")", "dwh pg = self.Parent if dwh: pg.usage_unit.Label = dwh.usage_unit +", "self.lc.AppendColumn('Cách dùng', width=100) self.lc.Bind(wx.EVT_MOTION, self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Update()", "self.lc = None self.mv = parent.mv self.init_d_l = query_linedrug_list(self.mv.sess).all() self.d_l", "self.lc def SetStringValue(self, val): idx = self.lc.FindItem(-1, val) if idx", "idx = self.lc.FindItem(-1, val) if idx != wx.NOT_FOUND: self.lc.Select(idx) def", "' pg.usage.ChangeValue(\"\") def onKeyPress(self, e): if os.name == \"posix\": if", "x.name.casefold() or s.casefold() in x.element.casefold(), self.init_d_l)) for index, item in", "self.Popup() self.SetInsertionPointEnd() if os.name == \"posix\": if e.String == \"\":", "self.curitem = item def OnLeftDown(self, e): try: self.value = self.curitem", "val): idx = self.lc.FindItem(-1, val) if idx != wx.NOT_FOUND: self.lc.Select(idx)", "return super().GetAdjustedSize(*popup_size) def Update(self, s=''): self.lc.DeleteAllItems() self.d_l = 
list(filter( lambda", "0: if self.curitem < (self.lc.ItemCount - 1): self.curitem += 1", "self.Clear() elif len(e.String) >= 1: if not self.IsPopupShown(): self.Popup() self.SetInsertionPointEnd()", "> 0: if self.curitem < (self.lc.ItemCount - 1): self.curitem +=", "Enter để search thuốc\") self._drugWH = None self.EnablePopupAnimation(enable=False) @property def", "self.value >= 0: return self.lc.GetItemText(self.value, col=0) return \"\" def GetAdjustedSize(self,", "= self.d_l[self.value] self.Dismiss() self.ComboCtrl.SelectAll() self.ComboCtrl.SetInsertionPointEnd() except IndexError: self.Dismiss() def OnPopup(self):", "x.element.casefold(), self.init_d_l)) for index, item in enumerate(self.d_l): self.lc.Append( [item.name, item.element,", "item >= 0: self.lc.Select(item) self.curitem = item def OnLeftDown(self, e):", "len(e.String) >= 1: if not self.IsPopupShown(): self.Popup() self.SetInsertionPointEnd() if os.name", "dwh): self._drugWH = dwh pg = self.Parent if dwh: pg.usage_unit.Label", "= self.Value self.Dismiss() self.ChangeValue(a) self.SetInsertionPointEnd() e.Skip() def onTextChange(self, e): if", "self._drugWH = None self.EnablePopupAnimation(enable=False) @property def drugWH(self): return self._drugWH @drugWH.setter", "prefHeight, maxHeight): return super().GetAdjustedSize(*popup_size) def Update(self, s=''): self.lc.DeleteAllItems() self.d_l =", "query_linedrug_list import os import wx class DrugPopup(wx.ComboPopup): def __init__(self, parent):", "0: self.curitem -= 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) else: self.KeyESC() def KeyESC(self):", "= self.ComboCtrl.Value self.Dismiss() self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd() def KeyReturn(self): self.OnLeftDown(None) def onKeyPress(self,", "self.d_l[self.value] self.Dismiss() self.ComboCtrl.SelectAll() self.ComboCtrl.SetInsertionPointEnd() except IndexError: self.Dismiss() def OnPopup(self): self.Init()", "+= 1 self.lc.Select(self.curitem) 
self.lc.EnsureVisible(self.curitem) def KeyUp(self): if self.lc.ItemCount > 0:", "= query_linedrug_list(self.mv.sess).all() self.d_l = [] def Create(self, parent): self.lc =", "* from core.db.db_func import query_linedrug_list import os import wx class", "wx.WXK_DOWN]: if not self.IsPopupShown(): self.Popup() else: e.Skip() else: if e.GetKeyCode()", "parent): super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER) self.mv = parent.mv self.drug_popup = DrugPopup(self)", "self.SetHint(\"Nhấn Enter để search thuốc\") self._drugWH = None self.EnablePopupAnimation(enable=False) @property", "def Create(self, parent): self.lc = wx.ListCtrl( parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL", "giá') self.lc.AppendColumn('Cách dùng', width=100) self.lc.Bind(wx.EVT_MOTION, self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)", "== \"posix\": if e.String == \"\": self.Clear() def Clear(self): self.drugWH", "= -1 def GetControl(self): return self.lc def SetStringValue(self, val): idx", "\"posix\": if e.String == \"\": self.Clear() def Clear(self): self.drugWH =", "item.quantity, item.sale_price, item.usage]) if item.quantity <= user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]: self.lc.SetItemTextColour(index, wx.Colour(252, 3,", "\"posix\": if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]: if not self.IsPopupShown(): self.Popup()", "\" \" else: self.ChangeValue('') pg.dosage_per.ChangeValue('') pg.usage_unit.Label = '{Đơn vị} '", "1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyUp(self): if self.lc.ItemCount", "e.Skip() def onTextChange(self, e): if os.name == \"nt\": if e.String", "style=wx.TE_PROCESS_ENTER) self.mv = parent.mv self.drug_popup = DrugPopup(self) self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)", "self.onKeyPress) self.Bind(wx.EVT_TEXT, self.onTextChange) self.SetHint(\"Nhấn Enter để search thuốc\") 
self._drugWH =", "self.lc.EnsureVisible(self.curitem) def KeyUp(self): if self.lc.ItemCount > 0: if self.curitem >", "self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Update() return True def Init(self): self.value", "self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyUp(self): if self.lc.ItemCount >", "self.Bind(wx.EVT_TEXT, self.onTextChange) self.SetHint(\"Nhấn Enter để search thuốc\") self._drugWH = None", "item.quantity <= user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]: self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255)) def OnMotion(self,", "KeyReturn(self): self.OnLeftDown(None) def onKeyPress(self, e): c = e.GetKeyCode() if c", "return \"\" def GetAdjustedSize(self, minWidth, prefHeight, maxHeight): return super().GetAdjustedSize(*popup_size) def", "\"\": self.Clear() elif len(e.String) >= 1: if not self.IsPopupShown(): self.Popup()", "e): try: self.value = self.curitem self.ComboCtrl.drugWH = self.d_l[self.value] self.Dismiss() self.ComboCtrl.SelectAll()", "' pg.times.ChangeValue(\"\") pg.quantity.ChangeValue(\"\") pg.sale_unit.Label = '{Đơn vị} ' pg.usage.ChangeValue(\"\") def", "\" \" pg.sale_unit.Label = dwh.sale_unit + \" \" else: self.ChangeValue('')", "= '{Đơn vị} ' pg.times.ChangeValue(\"\") pg.quantity.ChangeValue(\"\") pg.sale_unit.Label = '{Đơn vị}", "self.KeyESC() elif c == wx.WXK_RETURN: self.KeyReturn() class DrugPicker(wx.ComboCtrl): def __init__(self,", "self.lc.HitTest(e.GetPosition()) if item >= 0: self.lc.Select(item) self.curitem = item def", "return self._drugWH @drugWH.setter def drugWH(self, dwh): self._drugWH = dwh pg", "(self.lc.ItemCount - 1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyUp(self):", "user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]: self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255)) def OnMotion(self, e): item,", "GetAdjustedSize(self, 
minWidth, prefHeight, maxHeight): return super().GetAdjustedSize(*popup_size) def Update(self, s=''): self.lc.DeleteAllItems()", "\" pg.sale_unit.Label = dwh.sale_unit + \" \" else: self.ChangeValue('') pg.dosage_per.ChangeValue('')", "!= wx.NOT_FOUND: self.lc.Select(idx) def GetStringValue(self): if self.value >= 0: return", "= None self.mv = parent.mv self.init_d_l = query_linedrug_list(self.mv.sess).all() self.d_l =", ">= 1: if not self.IsPopupShown(): self.Popup() self.SetInsertionPointEnd() if os.name ==", "- 1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyUp(self): if", "onTextChange(self, e): if os.name == \"nt\": if e.String == \"\":", "> 0: self.curitem -= 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) else: self.KeyESC() def", "wx.Colour(252, 3, 57, 255)) def OnMotion(self, e): item, flags =", "item, flags = self.lc.HitTest(e.GetPosition()) if item >= 0: self.lc.Select(item) self.curitem", "else: self.ChangeValue('') pg.dosage_per.ChangeValue('') pg.usage_unit.Label = '{Đơn vị} ' pg.times.ChangeValue(\"\") pg.quantity.ChangeValue(\"\")", "= item def OnLeftDown(self, e): try: self.value = self.curitem self.ComboCtrl.drugWH", "self.SetInsertionPointEnd() if os.name == \"posix\": if e.String == \"\": self.Clear()", "wx.WXK_RETURN: self.KeyReturn() class DrugPicker(wx.ComboCtrl): def __init__(self, parent): super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER)", "pg = self.Parent if dwh: pg.usage_unit.Label = dwh.usage_unit + \"", "item in enumerate(self.d_l): self.lc.Append( [item.name, item.element, item.quantity, item.sale_price, item.usage]) if", "not in [wx.WXK_RETURN, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_ESCAPE]: if self.IsPopupShown(): a =", "self.Update() return True def Init(self): self.value = -1 self.curitem =", "3, 57, 255)) def OnMotion(self, e): item, flags = self.lc.HitTest(e.GetPosition())", "e): c = e.GetKeyCode() if c == wx.WXK_DOWN: self.KeyDown() elif", "s=''): 
self.lc.DeleteAllItems() self.d_l = list(filter( lambda x: s.casefold() in x.name.casefold()", "in [wx.WXK_RETURN, wx.WXK_DOWN]: if not self.IsPopupShown(): self.Popup() else: e.Skip() else:", "self.Dismiss() self.ChangeValue(a) self.SetInsertionPointEnd() e.Skip() def onTextChange(self, e): if os.name ==", "self._drugWH @drugWH.setter def drugWH(self, dwh): self._drugWH = dwh pg =", "query_linedrug_list(self.mv.sess).all() self.d_l = [] def Create(self, parent): self.lc = wx.ListCtrl(", "def KeyDown(self): if self.lc.ItemCount > 0: if self.curitem < (self.lc.ItemCount", "super().__init__() self.lc = None self.mv = parent.mv self.init_d_l = query_linedrug_list(self.mv.sess).all()", "255)) def OnMotion(self, e): item, flags = self.lc.HitTest(e.GetPosition()) if item", "return True def Init(self): self.value = -1 self.curitem = -1", "self.curitem self.ComboCtrl.drugWH = self.d_l[self.value] self.Dismiss() self.ComboCtrl.SelectAll() self.ComboCtrl.SetInsertionPointEnd() except IndexError: self.Dismiss()", "s.casefold() in x.name.casefold() or s.casefold() in x.element.casefold(), self.init_d_l)) for index,", "self.lc.ItemCount > 0: if self.curitem < (self.lc.ItemCount - 1): self.curitem", "0: if self.curitem > 0: self.curitem -= 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem)", "self.lc.EnsureVisible(self.curitem) else: self.KeyESC() def KeyESC(self): a = self.ComboCtrl.Value self.Dismiss() self.ComboCtrl.ChangeValue(a)", "a = self.ComboCtrl.Value self.Dismiss() self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd() def KeyReturn(self): self.OnLeftDown(None) def", "if e.GetKeyCode() not in [wx.WXK_RETURN, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_ESCAPE]: if self.IsPopupShown():", "def GetStringValue(self): if self.value >= 0: return self.lc.GetItemText(self.value, col=0) return", "import os import wx class DrugPopup(wx.ComboPopup): def __init__(self, parent): super().__init__()", "= [] def Create(self, parent): self.lc = wx.ListCtrl( parent, 
style=wx.LC_REPORT", "= '{Đơn vị} ' pg.usage.ChangeValue(\"\") def onKeyPress(self, e): if os.name", "item.element, item.quantity, item.sale_price, item.usage]) if item.quantity <= user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]: self.lc.SetItemTextColour(index, wx.Colour(252,", "c == wx.WXK_UP: self.KeyUp() elif c == wx.WXK_ESCAPE: self.KeyESC() elif", "self.lc.FindItem(-1, val) if idx != wx.NOT_FOUND: self.lc.Select(idx) def GetStringValue(self): if", "= dwh pg = self.Parent if dwh: pg.usage_unit.Label = dwh.usage_unit", "self.OnLeftDown(None) def onKeyPress(self, e): c = e.GetKeyCode() if c ==", "@drugWH.setter def drugWH(self, dwh): self._drugWH = dwh pg = self.Parent", "item def OnLeftDown(self, e): try: self.value = self.curitem self.ComboCtrl.drugWH =", "if c == wx.WXK_DOWN: self.KeyDown() elif c == wx.WXK_UP: self.KeyUp()", "thuốc\") self._drugWH = None self.EnablePopupAnimation(enable=False) @property def drugWH(self): return self._drugWH", "def Init(self): self.value = -1 self.curitem = -1 def GetControl(self):", "= self.Parent if dwh: pg.usage_unit.Label = dwh.usage_unit + \" \"", "+ \" \" else: self.ChangeValue('') pg.dosage_per.ChangeValue('') pg.usage_unit.Label = '{Đơn vị}", "= list(filter( lambda x: s.casefold() in x.name.casefold() or s.casefold() in", "import wx class DrugPopup(wx.ComboPopup): def __init__(self, parent): super().__init__() self.lc =", "dwh: pg.usage_unit.Label = dwh.usage_unit + \" \" pg.sale_unit.Label = dwh.sale_unit", "Init(self): self.value = -1 self.curitem = -1 def GetControl(self): return", "pg.times.ChangeValue(\"\") pg.quantity.ChangeValue(\"\") pg.sale_unit.Label = '{Đơn vị} ' pg.usage.ChangeValue(\"\") def onKeyPress(self,", "super().GetAdjustedSize(*popup_size) def Update(self, s=''): self.lc.DeleteAllItems() self.d_l = list(filter( lambda x:", "not self.IsPopupShown(): self.Popup() else: e.Skip() else: if e.GetKeyCode() not in", "-1 def GetControl(self): return self.lc def SetStringValue(self, val): idx =", 
"parent.mv self.init_d_l = query_linedrug_list(self.mv.sess).all() self.d_l = [] def Create(self, parent):", "def onKeyPress(self, e): c = e.GetKeyCode() if c == wx.WXK_DOWN:", "wx class DrugPopup(wx.ComboPopup): def __init__(self, parent): super().__init__() self.lc = None", "self.drug_popup = DrugPopup(self) self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Bind(wx.EVT_TEXT, self.onTextChange) self.SetHint(\"Nhấn Enter", "self.lc.AppendColumn('Thành phần', width=150) self.lc.AppendColumn('Số lượng') self.lc.AppendColumn('Đơn giá') self.lc.AppendColumn('Cách dùng', width=100)", "if not self.IsPopupShown(): self.Popup() else: e.Skip() else: if e.GetKeyCode() not", "if e.String == \"\": self.Clear() def Clear(self): self.drugWH = None", "wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc', width=200) self.lc.AppendColumn('Thành phần', width=150) self.lc.AppendColumn('Số lượng')", "wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc', width=200) self.lc.AppendColumn('Thành phần', width=150) self.lc.AppendColumn('Số lượng') self.lc.AppendColumn('Đơn giá')", "def GetAdjustedSize(self, minWidth, prefHeight, maxHeight): return super().GetAdjustedSize(*popup_size) def Update(self, s=''):", "class DrugPopup(wx.ComboPopup): def __init__(self, parent): super().__init__() self.lc = None self.mv", "1: if not self.IsPopupShown(): self.Popup() self.SetInsertionPointEnd() if os.name == \"posix\":", "- 1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyDown(self): if", "[] def Create(self, parent): self.lc = wx.ListCtrl( parent, style=wx.LC_REPORT |", "= -1 self.curitem = -1 def GetControl(self): return self.lc def", "self.ComboCtrl.SetInsertionPointEnd() except IndexError: self.Dismiss() def OnPopup(self): self.Init() self.Update(self.ComboCtrl.Value) if self.lc.ItemCount", "or s.casefold() in x.element.casefold(), self.init_d_l)) for index, item in enumerate(self.d_l):", 
"self.ComboCtrl.SetInsertionPointEnd() def KeyReturn(self): self.OnLeftDown(None) def onKeyPress(self, e): c = e.GetKeyCode()", "return self.lc.GetItemText(self.value, col=0) return \"\" def GetAdjustedSize(self, minWidth, prefHeight, maxHeight):", "= self.curitem self.ComboCtrl.drugWH = self.d_l[self.value] self.Dismiss() self.ComboCtrl.SelectAll() self.ComboCtrl.SetInsertionPointEnd() except IndexError:", "import * from core.db.db_func import query_linedrug_list import os import wx", "e): if os.name == \"nt\": if e.String == \"\": self.Clear()", "self.lc.Append( [item.name, item.element, item.quantity, item.sale_price, item.usage]) if item.quantity <= user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]:", "Create(self, parent): self.lc = wx.ListCtrl( parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL |", "SetStringValue(self, val): idx = self.lc.FindItem(-1, val) if idx != wx.NOT_FOUND:", "self.IsPopupShown(): self.Popup() else: e.Skip() else: if e.GetKeyCode() not in [wx.WXK_RETURN,", "val) if idx != wx.NOT_FOUND: self.lc.Select(idx) def GetStringValue(self): if self.value", "return self.lc def SetStringValue(self, val): idx = self.lc.FindItem(-1, val) if", "in enumerate(self.d_l): self.lc.Append( [item.name, item.element, item.quantity, item.sale_price, item.usage]) if item.quantity", "a = self.Value self.Dismiss() self.ChangeValue(a) self.SetInsertionPointEnd() e.Skip() def onTextChange(self, e):", "e.GetKeyCode() if c == wx.WXK_DOWN: self.KeyDown() elif c == wx.WXK_UP:", "try: self.value = self.curitem self.ComboCtrl.drugWH = self.d_l[self.value] self.Dismiss() self.ComboCtrl.SelectAll() self.ComboCtrl.SetInsertionPointEnd()", "in x.name.casefold() or s.casefold() in x.element.casefold(), self.init_d_l)) for index, item", "wx.ListCtrl( parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc', width=200) self.lc.AppendColumn('Thành", "'{Đơn vị} ' pg.times.ChangeValue(\"\") pg.quantity.ChangeValue(\"\") pg.sale_unit.Label 
= '{Đơn vị} '", "-= 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) else: self.KeyESC() def KeyESC(self): a =", "def OnLeftDown(self, e): try: self.value = self.curitem self.ComboCtrl.drugWH = self.d_l[self.value]", "self.lc.Select(idx) def GetStringValue(self): if self.value >= 0: return self.lc.GetItemText(self.value, col=0)", "def KeyESC(self): a = self.ComboCtrl.Value self.Dismiss() self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd() def KeyReturn(self):", "dwh.usage_unit + \" \" pg.sale_unit.Label = dwh.sale_unit + \" \"", "< (self.lc.ItemCount - 1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def", "self.KeyUp() elif c == wx.WXK_ESCAPE: self.KeyESC() elif c == wx.WXK_RETURN:", "list(filter( lambda x: s.casefold() in x.name.casefold() or s.casefold() in x.element.casefold(),", "from core.db.db_func import query_linedrug_list import os import wx class DrugPopup(wx.ComboPopup):", "== wx.WXK_DOWN: self.KeyDown() elif c == wx.WXK_UP: self.KeyUp() elif c", "os.name == \"posix\": if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]: if not", "None self.mv = parent.mv self.init_d_l = query_linedrug_list(self.mv.sess).all() self.d_l = []", "if self.IsPopupShown(): a = self.Value self.Dismiss() self.ChangeValue(a) self.SetInsertionPointEnd() e.Skip() def", "== wx.WXK_UP: self.KeyUp() elif c == wx.WXK_ESCAPE: self.KeyESC() elif c", "col=0) return \"\" def GetAdjustedSize(self, minWidth, prefHeight, maxHeight): return super().GetAdjustedSize(*popup_size)", "pg.usage_unit.Label = dwh.usage_unit + \" \" pg.sale_unit.Label = dwh.sale_unit +", "e): item, flags = self.lc.HitTest(e.GetPosition()) if item >= 0: self.lc.Select(item)", "pg.usage_unit.Label = '{Đơn vị} ' pg.times.ChangeValue(\"\") pg.quantity.ChangeValue(\"\") pg.sale_unit.Label = '{Đơn", "s.casefold() in x.element.casefold(), self.init_d_l)) for index, item in enumerate(self.d_l): self.lc.Append(", "\"\": self.Clear() def Clear(self): 
self.drugWH = None def refreshPopup(self): self.drug_popup.init_d_l", "core.db.db_func import query_linedrug_list import os import wx class DrugPopup(wx.ComboPopup): def", "index, item in enumerate(self.d_l): self.lc.Append( [item.name, item.element, item.quantity, item.sale_price, item.usage])", "1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyDown(self): if self.lc.ItemCount", "self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Bind(wx.EVT_TEXT, self.onTextChange) self.SetHint(\"Nhấn Enter để search thuốc\") self._drugWH", "self.EnablePopupAnimation(enable=False) @property def drugWH(self): return self._drugWH @drugWH.setter def drugWH(self, dwh):", "in [wx.WXK_RETURN, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_ESCAPE]: if self.IsPopupShown(): a = self.Value", "else: self.KeyESC() def KeyESC(self): a = self.ComboCtrl.Value self.Dismiss() self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd()", "style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc', width=200) self.lc.AppendColumn('Thành phần', width=150)", "self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Bind(wx.EVT_TEXT, self.onTextChange) self.SetHint(\"Nhấn Enter để search thuốc\")", "= None self.EnablePopupAnimation(enable=False) @property def drugWH(self): return self._drugWH @drugWH.setter def", "pg.sale_unit.Label = dwh.sale_unit + \" \" else: self.ChangeValue('') pg.dosage_per.ChangeValue('') pg.usage_unit.Label", "= dwh.sale_unit + \" \" else: self.ChangeValue('') pg.dosage_per.ChangeValue('') pg.usage_unit.Label =", "if os.name == \"posix\": if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]: if", "vị} ' pg.usage.ChangeValue(\"\") def onKeyPress(self, e): if os.name == \"posix\":", "= dwh.usage_unit + \" \" pg.sale_unit.Label = dwh.sale_unit + \"", "if self.value >= 0: return self.lc.GetItemText(self.value, col=0) return \"\" def", "self.lc.Select(item) self.curitem = item def 
OnLeftDown(self, e): try: self.value =", "[item.name, item.element, item.quantity, item.sale_price, item.usage]) if item.quantity <= user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]: self.lc.SetItemTextColour(index,", "self.IsPopupShown(): a = self.Value self.Dismiss() self.ChangeValue(a) self.SetInsertionPointEnd() e.Skip() def onTextChange(self,", "0: self.lc.Select(item) self.curitem = item def OnLeftDown(self, e): try: self.value", "self.init_d_l = query_linedrug_list(self.mv.sess).all() self.d_l = [] def Create(self, parent): self.lc", "self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255)) def OnMotion(self, e): item, flags", "self.Value self.Dismiss() self.ChangeValue(a) self.SetInsertionPointEnd() e.Skip() def onTextChange(self, e): if os.name", "vị} ' pg.times.ChangeValue(\"\") pg.quantity.ChangeValue(\"\") pg.sale_unit.Label = '{Đơn vị} ' pg.usage.ChangeValue(\"\")", "OnPopup(self): self.Init() self.Update(self.ComboCtrl.Value) if self.lc.ItemCount > 0: if self.curitem <", "e): if os.name == \"posix\": if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]:", "def OnPopup(self): self.Init() self.Update(self.ComboCtrl.Value) if self.lc.ItemCount > 0: if self.curitem", "self.KeyReturn() class DrugPicker(wx.ComboCtrl): def __init__(self, parent): super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER) self.mv", "dùng', width=100) self.lc.Bind(wx.EVT_MOTION, self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Update() return", "pg.sale_unit.Label = '{Đơn vị} ' pg.usage.ChangeValue(\"\") def onKeyPress(self, e): if", "wx.WXK_ESCAPE: self.KeyESC() elif c == wx.WXK_RETURN: self.KeyReturn() class DrugPicker(wx.ComboCtrl): def", "= parent.mv self.drug_popup = DrugPopup(self) self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Bind(wx.EVT_TEXT, self.onTextChange)", "pg.usage.ChangeValue(\"\") def onKeyPress(self, e): if os.name == \"posix\": 
if e.GetKeyCode()", "os.name == \"nt\": if e.String == \"\": self.Clear() elif len(e.String)", "initialize import * from core.db.db_func import query_linedrug_list import os import", "= wx.ListCtrl( parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc', width=200)", "def drugWH(self): return self._drugWH @drugWH.setter def drugWH(self, dwh): self._drugWH =", "self.mv = parent.mv self.init_d_l = query_linedrug_list(self.mv.sess).all() self.d_l = [] def", "if self.curitem < (self.lc.ItemCount - 1): self.curitem += 1 self.lc.Select(self.curitem)", "parent): super().__init__() self.lc = None self.mv = parent.mv self.init_d_l =", "@property def drugWH(self): return self._drugWH @drugWH.setter def drugWH(self, dwh): self._drugWH", "self.KeyDown() elif c == wx.WXK_UP: self.KeyUp() elif c == wx.WXK_ESCAPE:", "0: return self.lc.GetItemText(self.value, col=0) return \"\" def GetAdjustedSize(self, minWidth, prefHeight,", "+= 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyDown(self): if self.lc.ItemCount > 0:", "import query_linedrug_list import os import wx class DrugPopup(wx.ComboPopup): def __init__(self,", "def __init__(self, parent): super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER) self.mv = parent.mv self.drug_popup", "DrugPicker(wx.ComboCtrl): def __init__(self, parent): super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER) self.mv = parent.mv", "onKeyPress(self, e): c = e.GetKeyCode() if c == wx.WXK_DOWN: self.KeyDown()", "KeyESC(self): a = self.ComboCtrl.Value self.Dismiss() self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd() def KeyReturn(self): self.OnLeftDown(None)", "GetControl(self): return self.lc def SetStringValue(self, val): idx = self.lc.FindItem(-1, val)", "self.onKeyPress) self.Update() return True def Init(self): self.value = -1 self.curitem", "[wx.WXK_RETURN, wx.WXK_DOWN]: if not self.IsPopupShown(): self.Popup() else: e.Skip() 
else: if", "self.IsPopupShown(): self.Popup() self.SetInsertionPointEnd() if os.name == \"posix\": if e.String ==", "if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]: if not self.IsPopupShown(): self.Popup() else:", "wx.WXK_UP, wx.WXK_DOWN, wx.WXK_ESCAPE]: if self.IsPopupShown(): a = self.Value self.Dismiss() self.ChangeValue(a)", "| wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc', width=200) self.lc.AppendColumn('Thành phần', width=150) self.lc.AppendColumn('Số", "os.name == \"posix\": if e.String == \"\": self.Clear() def Clear(self):", "width=150) self.lc.AppendColumn('Số lượng') self.lc.AppendColumn('Đơn giá') self.lc.AppendColumn('Cách dùng', width=100) self.lc.Bind(wx.EVT_MOTION, self.OnMotion)", "size=drugctrl_size, style=wx.TE_PROCESS_ENTER) self.mv = parent.mv self.drug_popup = DrugPopup(self) self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN,", "x: s.casefold() in x.name.casefold() or s.casefold() in x.element.casefold(), self.init_d_l)) for", "elif c == wx.WXK_RETURN: self.KeyReturn() class DrugPicker(wx.ComboCtrl): def __init__(self, parent):", "pg.quantity.ChangeValue(\"\") pg.sale_unit.Label = '{Đơn vị} ' pg.usage.ChangeValue(\"\") def onKeyPress(self, e):", "self.curitem -= 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) else: self.KeyESC() def KeyESC(self): a", "class DrugPicker(wx.ComboCtrl): def __init__(self, parent): super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER) self.mv =", "= DrugPopup(self) self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Bind(wx.EVT_TEXT, self.onTextChange) self.SetHint(\"Nhấn Enter để", "item.usage]) if item.quantity <= user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]: self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255))", "self.lc.GetItemText(self.value, col=0) return \"\" def GetAdjustedSize(self, minWidth, prefHeight, maxHeight): return", "self.ComboCtrl.SelectAll() 
self.ComboCtrl.SetInsertionPointEnd() except IndexError: self.Dismiss() def OnPopup(self): self.Init() self.Update(self.ComboCtrl.Value) if", "e.Skip() else: if e.GetKeyCode() not in [wx.WXK_RETURN, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_ESCAPE]:", "self.init_d_l)) for index, item in enumerate(self.d_l): self.lc.Append( [item.name, item.element, item.quantity,", "def __init__(self, parent): super().__init__() self.lc = None self.mv = parent.mv", "self.lc.DeleteAllItems() self.d_l = list(filter( lambda x: s.casefold() in x.name.casefold() or", "def OnMotion(self, e): item, flags = self.lc.HitTest(e.GetPosition()) if item >=", "<= user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]: self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255)) def OnMotion(self, e):", "c == wx.WXK_ESCAPE: self.KeyESC() elif c == wx.WXK_RETURN: self.KeyReturn() class", "self._drugWH = dwh pg = self.Parent if dwh: pg.usage_unit.Label =", "True def Init(self): self.value = -1 self.curitem = -1 def", "lambda x: s.casefold() in x.name.casefold() or s.casefold() in x.element.casefold(), self.init_d_l))", "57, 255)) def OnMotion(self, e): item, flags = self.lc.HitTest(e.GetPosition()) if", "if item.quantity <= user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]: self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255)) def", "def KeyUp(self): if self.lc.ItemCount > 0: if self.curitem > 0:", "super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER) self.mv = parent.mv self.drug_popup = DrugPopup(self) self.SetPopupControl(self.drug_popup)", "if item >= 0: self.lc.Select(item) self.curitem = item def OnLeftDown(self,", ">= 0: return self.lc.GetItemText(self.value, col=0) return \"\" def GetAdjustedSize(self, minWidth,", "drugWH(self, dwh): self._drugWH = dwh pg = self.Parent if dwh:", "self.value = self.curitem self.ComboCtrl.drugWH = self.d_l[self.value] self.Dismiss() self.ComboCtrl.SelectAll() self.ComboCtrl.SetInsertionPointEnd() except", "self.lc.Select(self.curitem) 
self.lc.EnsureVisible(self.curitem) else: self.KeyESC() def KeyESC(self): a = self.ComboCtrl.Value self.Dismiss()", "e.String == \"\": self.Clear() def Clear(self): self.drugWH = None def", "def Update(self, s=''): self.lc.DeleteAllItems() self.d_l = list(filter( lambda x: s.casefold()", "__init__(self, parent): super().__init__() self.lc = None self.mv = parent.mv self.init_d_l", "self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyDown(self): if self.lc.ItemCount > 0: if self.curitem", "self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd() def KeyReturn(self): self.OnLeftDown(None) def onKeyPress(self, e): c =", "\"\" def GetAdjustedSize(self, minWidth, prefHeight, maxHeight): return super().GetAdjustedSize(*popup_size) def Update(self,", "self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyDown(self): if self.lc.ItemCount >", "if self.lc.ItemCount > 0: if self.curitem > 0: self.curitem -=", "def onKeyPress(self, e): if os.name == \"posix\": if e.GetKeyCode() in", "self.lc.Bind(wx.EVT_MOTION, self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Update() return True def", "== \"\": self.Clear() def Clear(self): self.drugWH = None def refreshPopup(self):", "in x.element.casefold(), self.init_d_l)) for index, item in enumerate(self.d_l): self.lc.Append( [item.name,", "if e.String == \"\": self.Clear() elif len(e.String) >= 1: if", "+ \" \" pg.sale_unit.Label = dwh.sale_unit + \" \" else:", "elif c == wx.WXK_UP: self.KeyUp() elif c == wx.WXK_ESCAPE: self.KeyESC()", "self.Update(self.ComboCtrl.Value) if self.lc.ItemCount > 0: if self.curitem < (self.lc.ItemCount -", "self.d_l = list(filter( lambda x: s.casefold() in x.name.casefold() or s.casefold()", "wx.WXK_DOWN: self.KeyDown() elif c == wx.WXK_UP: self.KeyUp() elif c ==", "-1 self.curitem = -1 def GetControl(self): return self.lc def SetStringValue(self,", "== wx.WXK_ESCAPE: 
self.KeyESC() elif c == wx.WXK_RETURN: self.KeyReturn() class DrugPicker(wx.ComboCtrl):", "Update(self, s=''): self.lc.DeleteAllItems() self.d_l = list(filter( lambda x: s.casefold() in", "| wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc', width=200) self.lc.AppendColumn('Thành phần', width=150) self.lc.AppendColumn('Số lượng') self.lc.AppendColumn('Đơn", "c == wx.WXK_RETURN: self.KeyReturn() class DrugPicker(wx.ComboCtrl): def __init__(self, parent): super().__init__(parent,", "parent.mv self.drug_popup = DrugPopup(self) self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Bind(wx.EVT_TEXT, self.onTextChange) self.SetHint(\"Nhấn", "item.sale_price, item.usage]) if item.quantity <= user_setting[\"so_luong_thuoc_toi_thieu_de_bao_dong_do\"]: self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57,", "self.ComboCtrl.Value self.Dismiss() self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd() def KeyReturn(self): self.OnLeftDown(None) def onKeyPress(self, e):", "e.GetKeyCode() not in [wx.WXK_RETURN, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_ESCAPE]: if self.IsPopupShown(): a", "= parent.mv self.init_d_l = query_linedrug_list(self.mv.sess).all() self.d_l = [] def Create(self,", "dwh.sale_unit + \" \" else: self.ChangeValue('') pg.dosage_per.ChangeValue('') pg.usage_unit.Label = '{Đơn", "self.curitem < (self.lc.ItemCount - 1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem)", "= self.lc.HitTest(e.GetPosition()) if item >= 0: self.lc.Select(item) self.curitem = item", "self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Update() return True def Init(self): self.value = -1", "== \"nt\": if e.String == \"\": self.Clear() elif len(e.String) >=", "self.Popup() else: e.Skip() else: if e.GetKeyCode() not in [wx.WXK_RETURN, wx.WXK_UP,", "e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]: if not self.IsPopupShown(): self.Popup() else: e.Skip()", "self.lc.AppendColumn('Thuốc', width=200) self.lc.AppendColumn('Thành 
phần', width=150) self.lc.AppendColumn('Số lượng') self.lc.AppendColumn('Đơn giá') self.lc.AppendColumn('Cách", "if os.name == \"posix\": if e.String == \"\": self.Clear() def", "1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyDown(self): if self.lc.ItemCount > 0: if", "self.Clear() def Clear(self): self.drugWH = None def refreshPopup(self): self.drug_popup.init_d_l =", "= self.lc.FindItem(-1, val) if idx != wx.NOT_FOUND: self.lc.Select(idx) def GetStringValue(self):", "__init__(self, parent): super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER) self.mv = parent.mv self.drug_popup =", "1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) else: self.KeyESC() def KeyESC(self): a = self.ComboCtrl.Value", "elif c == wx.WXK_ESCAPE: self.KeyESC() elif c == wx.WXK_RETURN: self.KeyReturn()", "elif len(e.String) >= 1: if not self.IsPopupShown(): self.Popup() self.SetInsertionPointEnd() if", "= e.GetKeyCode() if c == wx.WXK_DOWN: self.KeyDown() elif c ==", "[wx.WXK_RETURN, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_ESCAPE]: if self.IsPopupShown(): a = self.Value self.Dismiss()", "wx.NOT_FOUND: self.lc.Select(idx) def GetStringValue(self): if self.value >= 0: return self.lc.GetItemText(self.value,", "wx.WXK_UP: self.KeyUp() elif c == wx.WXK_ESCAPE: self.KeyESC() elif c ==", "self.Init() self.Update(self.ComboCtrl.Value) if self.lc.ItemCount > 0: if self.curitem < (self.lc.ItemCount", "self.lc.ItemCount > 0: if self.curitem > 0: self.curitem -= 1", "onKeyPress(self, e): if os.name == \"posix\": if e.GetKeyCode() in [wx.WXK_RETURN,", "self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Update() return True def Init(self):", "self.curitem = -1 def GetControl(self): return self.lc def SetStringValue(self, val):", "else: if e.GetKeyCode() not in [wx.WXK_RETURN, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_ESCAPE]: if", "def KeyReturn(self): self.OnLeftDown(None) def onKeyPress(self, e): 
c = e.GetKeyCode() if", "self.Dismiss() def OnPopup(self): self.Init() self.Update(self.ComboCtrl.Value) if self.lc.ItemCount > 0: if", "self.lc.EnsureVisible(self.curitem) def KeyDown(self): if self.lc.ItemCount > 0: if self.curitem <", "'{Đơn vị} ' pg.usage.ChangeValue(\"\") def onKeyPress(self, e): if os.name ==", "if os.name == \"nt\": if e.String == \"\": self.Clear() elif", "> 0: if self.curitem > 0: self.curitem -= 1 self.lc.Select(self.curitem)", "1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyUp(self): if self.lc.ItemCount > 0: if", "if idx != wx.NOT_FOUND: self.lc.Select(idx) def GetStringValue(self): if self.value >=", "self.lc.AppendColumn('Số lượng') self.lc.AppendColumn('Đơn giá') self.lc.AppendColumn('Cách dùng', width=100) self.lc.Bind(wx.EVT_MOTION, self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN,", "except IndexError: self.Dismiss() def OnPopup(self): self.Init() self.Update(self.ComboCtrl.Value) if self.lc.ItemCount >", "width=200) self.lc.AppendColumn('Thành phần', width=150) self.lc.AppendColumn('Số lượng') self.lc.AppendColumn('Đơn giá') self.lc.AppendColumn('Cách dùng',", "self.value = -1 self.curitem = -1 def GetControl(self): return self.lc", "os import wx class DrugPopup(wx.ComboPopup): def __init__(self, parent): super().__init__() self.lc", "self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Update() return True def Init(self): self.value =", "GetStringValue(self): if self.value >= 0: return self.lc.GetItemText(self.value, col=0) return \"\"", "def GetControl(self): return self.lc def SetStringValue(self, val): idx = self.lc.FindItem(-1,", "self.curitem > 0: self.curitem -= 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) else: self.KeyESC()" ]
[ "Roteiro4.Roteiro4__funcoes import Grafo class Grafos: # Grafo da Paraíba paraiba", "'C', 'E', 'P']) for aresta in ['J-C', 'J-P', 'J-E', 'C-E',", "= Grafo(['A', 'B', 'C']) for aresta in ['A-B', 'B-C', 'C-A']:", "class Grafos: # Grafo da Paraíba paraiba = Grafo(['J', 'C',", "Grafo(['J', 'C', 'E', 'P']) for aresta in ['J-C', 'J-P', 'J-E',", "'P', 'M', 'T', 'Z']) for aresta in ['J-C', 'C-E', 'C-E',", "in ['J-C', 'C-E', 'C-E', 'C-P', 'C-P', 'C-M', 'C-T', 'M-T', 'T-Z']:", "'P']) for aresta in ['J-C', 'J-P', 'J-E', 'C-E', 'C-P', 'P-E']:", "'C', 'E', 'P', 'M', 'T', 'Z']) for aresta in ['J-C',", "'C-P', 'C-M', 'C-T', 'M-T', 'T-Z']: paraiba.adicionaAresta(aresta) # --- # #", "'J-P', 'J-E', 'C-E', 'C-P', 'P-E']: grafo_completo.adicionaAresta(aresta) # --- # #", "'P-E']: grafo_completo.adicionaAresta(aresta) # --- # # K3 k3 = Grafo(['A',", "'B', 'C']) for aresta in ['A-B', 'B-C', 'C-A']: k3.adicionaAresta(aresta) #", "Paraíba paraiba = Grafo(['J', 'C', 'E', 'P', 'M', 'T', 'Z'])", "paraiba = Grafo(['J', 'C', 'E', 'P', 'M', 'T', 'Z']) for", "Grafo Completo grafo_completo = Grafo(['J', 'C', 'E', 'P']) for aresta", "# --- # # K3 k3 = Grafo(['A', 'B', 'C'])", "for aresta in ['J-C', 'C-E', 'C-E', 'C-P', 'C-P', 'C-M', 'C-T',", "# Grafo da Paraíba paraiba = Grafo(['J', 'C', 'E', 'P',", "'C-P', 'C-P', 'C-M', 'C-T', 'M-T', 'T-Z']: paraiba.adicionaAresta(aresta) # --- #", "'C-E', 'C-E', 'C-P', 'C-P', 'C-M', 'C-T', 'M-T', 'T-Z']: paraiba.adicionaAresta(aresta) #", "paraiba.adicionaAresta(aresta) # --- # # Grafo Completo grafo_completo = Grafo(['J',", "# --- # # Grafo Completo grafo_completo = Grafo(['J', 'C',", "'J-E', 'C-E', 'C-P', 'P-E']: grafo_completo.adicionaAresta(aresta) # --- # # K3", "['J-C', 'C-E', 'C-E', 'C-P', 'C-P', 'C-M', 'C-T', 'M-T', 'T-Z']: paraiba.adicionaAresta(aresta)", "Grafo(['J', 'C', 'E', 'P', 'M', 'T', 'Z']) for aresta in", "for aresta in ['A-B', 'B-C', 'C-A']: k3.adicionaAresta(aresta) # --- #", "grafo_completo = Grafo(['J', 'C', 'E', 'P']) for aresta in 
['J-C',", "aresta in ['J-C', 'C-E', 'C-E', 'C-P', 'C-P', 'C-M', 'C-T', 'M-T',", "--- # # Grafo Completo grafo_completo = Grafo(['J', 'C', 'E',", "<reponame>GuilhermeEsdras/Grafos<gh_stars>0 from Roteiro4.Roteiro4__funcoes import Grafo class Grafos: # Grafo da", "Grafo class Grafos: # Grafo da Paraíba paraiba = Grafo(['J',", "da Paraíba paraiba = Grafo(['J', 'C', 'E', 'P', 'M', 'T',", "from Roteiro4.Roteiro4__funcoes import Grafo class Grafos: # Grafo da Paraíba", "grafo_completo.adicionaAresta(aresta) # --- # # K3 k3 = Grafo(['A', 'B',", "'M', 'T', 'Z']) for aresta in ['J-C', 'C-E', 'C-E', 'C-P',", "K3 k3 = Grafo(['A', 'B', 'C']) for aresta in ['A-B',", "= Grafo(['J', 'C', 'E', 'P', 'M', 'T', 'Z']) for aresta", "for aresta in ['J-C', 'J-P', 'J-E', 'C-E', 'C-P', 'P-E']: grafo_completo.adicionaAresta(aresta)", "'C-T', 'M-T', 'T-Z']: paraiba.adicionaAresta(aresta) # --- # # Grafo Completo", "k3 = Grafo(['A', 'B', 'C']) for aresta in ['A-B', 'B-C',", "'T-Z']: paraiba.adicionaAresta(aresta) # --- # # Grafo Completo grafo_completo =", "['J-C', 'J-P', 'J-E', 'C-E', 'C-P', 'P-E']: grafo_completo.adicionaAresta(aresta) # --- #", "'C-M', 'C-T', 'M-T', 'T-Z']: paraiba.adicionaAresta(aresta) # --- # # Grafo", "in ['J-C', 'J-P', 'J-E', 'C-E', 'C-P', 'P-E']: grafo_completo.adicionaAresta(aresta) # ---", "= Grafo(['J', 'C', 'E', 'P']) for aresta in ['J-C', 'J-P',", "'C-P', 'P-E']: grafo_completo.adicionaAresta(aresta) # --- # # K3 k3 =", "'Z']) for aresta in ['J-C', 'C-E', 'C-E', 'C-P', 'C-P', 'C-M',", "'C-E', 'C-P', 'C-P', 'C-M', 'C-T', 'M-T', 'T-Z']: paraiba.adicionaAresta(aresta) # ---", "'M-T', 'T-Z']: paraiba.adicionaAresta(aresta) # --- # # Grafo Completo grafo_completo", "'C-E', 'C-P', 'P-E']: grafo_completo.adicionaAresta(aresta) # --- # # K3 k3", "Completo grafo_completo = Grafo(['J', 'C', 'E', 'P']) for aresta in", "'T', 'Z']) for aresta in ['J-C', 'C-E', 'C-E', 'C-P', 'C-P',", "# Grafo Completo grafo_completo = Grafo(['J', 'C', 'E', 'P']) for", "Grafos: # Grafo da 
Paraíba paraiba = Grafo(['J', 'C', 'E',", "Grafo da Paraíba paraiba = Grafo(['J', 'C', 'E', 'P', 'M',", "'E', 'P']) for aresta in ['J-C', 'J-P', 'J-E', 'C-E', 'C-P',", "# K3 k3 = Grafo(['A', 'B', 'C']) for aresta in", "'E', 'P', 'M', 'T', 'Z']) for aresta in ['J-C', 'C-E',", "Grafo(['A', 'B', 'C']) for aresta in ['A-B', 'B-C', 'C-A']: k3.adicionaAresta(aresta)", "# # K3 k3 = Grafo(['A', 'B', 'C']) for aresta", "aresta in ['J-C', 'J-P', 'J-E', 'C-E', 'C-P', 'P-E']: grafo_completo.adicionaAresta(aresta) #", "'C']) for aresta in ['A-B', 'B-C', 'C-A']: k3.adicionaAresta(aresta) # ---", "--- # # K3 k3 = Grafo(['A', 'B', 'C']) for", "# # Grafo Completo grafo_completo = Grafo(['J', 'C', 'E', 'P'])", "import Grafo class Grafos: # Grafo da Paraíba paraiba =" ]
[ "= get_object_or_404(Factory, pk=fact_id) return render(request, 'board/detail.html', {'info':info}) @login_required def manager(request):", "request.method == 'POST': form = FactoryForm(request.POST) print form if form.is_valid():", "@login_required def manager(request): print \"manager...\" try: people = People.objects.get(user=request.user) factory", "args=(factid,))) else: form = FactoryForm() return render_to_response('board/new.html', {'form': form}, context_instance=RequestContext(request))", "save in Baidu Map params = urlencode({ 'title': title.encode(\"utf-8\"), 'address':", "'longitude': lng, 'job_num': num, 'factory_id': fact_id, } head = {", "from django.http import HttpResponseRedirect, HttpResponse from django.core.urlresolvers import reverse from", "head) print str(req) response = urllib2.urlopen(req) #print respone.read() lean_response =", "'no hire action...' return redirect(reverse('joboard.views.index', args=[])) return render(request, 'board/manager.html', {'info':factory})", "RequestContext from django.core.exceptions import ObjectDoesNotExist from urllib import urlopen, urlencode", "People.objects.get(user=request.user) factory = Factory.objects.get(fact_maintainer=people) except ObjectDoesNotExist: print 'no hire action...'", "return render(request, 'board/detail.html', {'info':info}) @login_required def manager(request): print \"manager...\" try:", "ObjectDoesNotExist: print 'no hire action...' 
return redirect(reverse('joboard.views.index', args=[])) return render(request,", "form.save(commit=False) print request.user factmodel.fact_maintainer = People.objects.get(user=request.user) factmodel.save() factid = factmodel.id", "= logging.getLogger(__name__) @login_required def index(request): form = None if request.method", "= urllib2.urlopen(req) #print respone.read() lean_response = fromJSON(response.read()) print lean_response lean_objectId", "utf-8 -*- from django.shortcuts import get_object_or_404, render_to_response, render from django.http", "geo_table, l_url, app_id, app_key from utils.pack_json import toJSON, fromJSON from", "django.shortcuts import get_object_or_404, render_to_response, render from django.http import HttpResponseRedirect, HttpResponse", "import redirect from joboard.models import Factory from joboard.forms import FactoryForm", "= form.save(commit=False) print request.user factmodel.fact_maintainer = People.objects.get(user=request.user) factmodel.save() factid =", "urllib2.Request(l_url, toJSON(data), head) print str(req) response = urllib2.urlopen(req) #print respone.read()", "context_instance=RequestContext(request)) @login_required def detail(request, fact_id): print fact_id info = get_object_or_404(Factory,", "manager(request): print \"manager...\" try: people = People.objects.get(user=request.user) factory = Factory.objects.get(fact_maintainer=people)", "\" + factory['fact_addr']) #save factory in model factmodel = form.save(commit=False)", "b_ak, geo_table, l_url, app_id, app_key from utils.pack_json import toJSON, fromJSON", "action...' 
return redirect(reverse('joboard.views.index', args=[])) return render(request, 'board/manager.html', {'info':factory}) def save_factory_cloud(fact_info,", "} req = urllib2.Request(l_url, toJSON(data), head) print str(req) response =", "in public server: leancloud and baidu save_factory_cloud(factory, factid) return HttpResponseRedirect(reverse('board:detail',", "= FactoryForm(request.POST) print form if form.is_valid(): factory = form.cleaned_data logger.debug(\"lat:", "\"manager...\" try: people = People.objects.get(user=request.user) factory = Factory.objects.get(fact_maintainer=people) except ObjectDoesNotExist:", "b_ak, 'job_num': num, 'lean_id': lean_objectId, }) req = urllib2.Request(b_url, params)", "import logging logger = logging.getLogger(__name__) @login_required def index(request): form =", "def detail(request, fact_id): print fact_id info = get_object_or_404(Factory, pk=fact_id) return", "None if request.method == 'POST': form = FactoryForm(request.POST) print form", "fact_id): print fact_id info = get_object_or_404(Factory, pk=fact_id) return render(request, 'board/detail.html',", "'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'coord_type': 3, 'geotable_id': geo_table,", "'job_num': num, 'lean_id': lean_objectId, }) req = urllib2.Request(b_url, params) #print", "= form.cleaned_data logger.debug(\"lat: \" + str(factory['fact_lat'])) logger.debug(\"addr: \" + factory['fact_addr'])", "except ObjectDoesNotExist: print 'no hire action...' 
return redirect(reverse('joboard.views.index', args=[])) return", "<filename>fuzzybee/joboard/views.py # -*- coding: utf-8 -*- from django.shortcuts import get_object_or_404,", "= fact_info['hire_num'] data = { 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude':", "'board/detail.html', {'info':info}) @login_required def manager(request): print \"manager...\" try: people =", "print lean_response lean_objectId = lean_response['objectId'] # save in Baidu Map", "print str(req) response = urllib2.urlopen(req) #print respone.read() lean_response = fromJSON(response.read())", "Map params = urlencode({ 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat,", "urllib2.urlopen(req) #print respone.read() lean_response = fromJSON(response.read()) print lean_response lean_objectId =", "from fuzzybee.conf import b_url, b_ak, geo_table, l_url, app_id, app_key from", "@login_required def index(request): form = None if request.method == 'POST':", "logger.debug(\"lat: \" + str(factory['fact_lat'])) logger.debug(\"addr: \" + factory['fact_addr']) #save factory", "= fact_info['fact_addr'] lat = fact_info['fact_lat'] lng = fact_info['fact_lng'] num =", "render_to_response, render from django.http import HttpResponseRedirect, HttpResponse from django.core.urlresolvers import", "\" + str(factory['fact_lat'])) logger.debug(\"addr: \" + factory['fact_addr']) #save factory in", "from django.core.urlresolvers import reverse from django.shortcuts import redirect from joboard.models", "l_url, app_id, app_key from utils.pack_json import toJSON, fromJSON from django.contrib.auth.decorators", "import RequestContext from django.core.exceptions import ObjectDoesNotExist from urllib import urlopen,", "detail(request, fact_id): print fact_id info = get_object_or_404(Factory, pk=fact_id) return render(request,", "print fact_id info = get_object_or_404(Factory, pk=fact_id) return render(request, 'board/detail.html', {'info':info})", 
"form}, context_instance=RequestContext(request)) @login_required def detail(request, fact_id): print fact_id info =", "get_object_or_404, render_to_response, render from django.http import HttpResponseRedirect, HttpResponse from django.core.urlresolvers", "'ak': b_ak, 'job_num': num, 'lean_id': lean_objectId, }) req = urllib2.Request(b_url,", "if form.is_valid(): factory = form.cleaned_data logger.debug(\"lat: \" + str(factory['fact_lat'])) logger.debug(\"addr:", "hire action...' return redirect(reverse('joboard.views.index', args=[])) return render(request, 'board/manager.html', {'info':factory}) def", "request.user factmodel.fact_maintainer = People.objects.get(user=request.user) factmodel.save() factid = factmodel.id #save in", "render from django.http import HttpResponseRedirect, HttpResponse from django.core.urlresolvers import reverse", "return render(request, 'board/manager.html', {'info':factory}) def save_factory_cloud(fact_info, fact_id): title = fact_info['fact_name']", "'job_num': num, 'factory_id': fact_id, } head = { 'X-AVOSCloud-Application-Id': app_id,", "'X-AVOSCloud-Application-Key': app_key, 'Content-Type': 'application/json', } req = urllib2.Request(l_url, toJSON(data), head)", "lean_response['objectId'] # save in Baidu Map params = urlencode({ 'title':", "if request.method == 'POST': form = FactoryForm(request.POST) print form if", "'application/json', } req = urllib2.Request(l_url, toJSON(data), head) print str(req) response", "{'info':factory}) def save_factory_cloud(fact_info, fact_id): title = fact_info['fact_name'] address = fact_info['fact_addr']", "login_required from people.models import People import logging logger = logging.getLogger(__name__)", "from utils.pack_json import toJSON, fromJSON from django.contrib.auth.decorators import login_required from", "= None if request.method == 'POST': form = FactoryForm(request.POST) print", "urlencode import urllib2 from fuzzybee.conf import b_url, b_ak, geo_table, l_url,", "#save factory in 
model factmodel = form.save(commit=False) print request.user factmodel.fact_maintainer", "form = None if request.method == 'POST': form = FactoryForm(request.POST)", "}) req = urllib2.Request(b_url, params) #print str(req) response = urllib2.urlopen(req)", "People.objects.get(user=request.user) factmodel.save() factid = factmodel.id #save in public server: leancloud", "urllib import urlopen, urlencode import urllib2 from fuzzybee.conf import b_url,", "@login_required def detail(request, fact_id): print fact_id info = get_object_or_404(Factory, pk=fact_id)", "params = urlencode({ 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude':", "fact_info['fact_addr'] lat = fact_info['fact_lat'] lng = fact_info['fact_lng'] num = fact_info['hire_num']", "{ 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'job_num':", "lat, 'longitude': lng, 'coord_type': 3, 'geotable_id': geo_table, 'ak': b_ak, 'job_num':", "= Factory.objects.get(fact_maintainer=people) except ObjectDoesNotExist: print 'no hire action...' return redirect(reverse('joboard.views.index',", "= fact_info['fact_name'] address = fact_info['fact_addr'] lat = fact_info['fact_lat'] lng =", "FactoryForm(request.POST) print form if form.is_valid(): factory = form.cleaned_data logger.debug(\"lat: \"", "print 'no hire action...' 
return redirect(reverse('joboard.views.index', args=[])) return render(request, 'board/manager.html',", "= urllib2.Request(b_url, params) #print str(req) response = urllib2.urlopen(req) #print respone.read()", "'latitude': lat, 'longitude': lng, 'job_num': num, 'factory_id': fact_id, } head", "num, 'factory_id': fact_id, } head = { 'X-AVOSCloud-Application-Id': app_id, 'X-AVOSCloud-Application-Key':", "django.core.exceptions import ObjectDoesNotExist from urllib import urlopen, urlencode import urllib2", "django.template import RequestContext from django.core.exceptions import ObjectDoesNotExist from urllib import", "import Factory from joboard.forms import FactoryForm from django.template import RequestContext", "baidu save_factory_cloud(factory, factid) return HttpResponseRedirect(reverse('board:detail', args=(factid,))) else: form = FactoryForm()", "= People.objects.get(user=request.user) factmodel.save() factid = factmodel.id #save in public server:", "save_factory_cloud(factory, factid) return HttpResponseRedirect(reverse('board:detail', args=(factid,))) else: form = FactoryForm() return", "= fromJSON(response.read()) print lean_response lean_objectId = lean_response['objectId'] # save in", "address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'job_num': num, 'factory_id': fact_id, }", "= fact_info['fact_lng'] num = fact_info['hire_num'] data = { 'title': title.encode(\"utf-8\"),", "respone.read() lean_response = fromJSON(response.read()) print lean_response lean_objectId = lean_response['objectId'] #", "joboard.forms import FactoryForm from django.template import RequestContext from django.core.exceptions import", "from django.contrib.auth.decorators import login_required from people.models import People import logging", "factory = Factory.objects.get(fact_maintainer=people) except ObjectDoesNotExist: print 'no hire action...' 
return", "'factory_id': fact_id, } head = { 'X-AVOSCloud-Application-Id': app_id, 'X-AVOSCloud-Application-Key': app_key,", "from django.template import RequestContext from django.core.exceptions import ObjectDoesNotExist from urllib", "{ 'X-AVOSCloud-Application-Id': app_id, 'X-AVOSCloud-Application-Key': app_key, 'Content-Type': 'application/json', } req =", "logging.getLogger(__name__) @login_required def index(request): form = None if request.method ==", "ObjectDoesNotExist from urllib import urlopen, urlencode import urllib2 from fuzzybee.conf", "return render_to_response('board/new.html', {'form': form}, context_instance=RequestContext(request)) @login_required def detail(request, fact_id): print", "fromJSON(response.read()) print lean_response lean_objectId = lean_response['objectId'] # save in Baidu", "factid = factmodel.id #save in public server: leancloud and baidu", "fact_id, } head = { 'X-AVOSCloud-Application-Id': app_id, 'X-AVOSCloud-Application-Key': app_key, 'Content-Type':", "data = { 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude':", "= urllib2.Request(l_url, toJSON(data), head) print str(req) response = urllib2.urlopen(req) #print", "app_key from utils.pack_json import toJSON, fromJSON from django.contrib.auth.decorators import login_required", "title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'job_num': num, 'factory_id':", "return HttpResponseRedirect(reverse('board:detail', args=(factid,))) else: form = FactoryForm() return render_to_response('board/new.html', {'form':", "django.http import HttpResponseRedirect, HttpResponse from django.core.urlresolvers import reverse from django.shortcuts", "redirect(reverse('joboard.views.index', args=[])) return render(request, 'board/manager.html', {'info':factory}) def save_factory_cloud(fact_info, fact_id): title", "import ObjectDoesNotExist from urllib import urlopen, urlencode import urllib2 from", 
"import HttpResponseRedirect, HttpResponse from django.core.urlresolvers import reverse from django.shortcuts import", "app_key, 'Content-Type': 'application/json', } req = urllib2.Request(l_url, toJSON(data), head) print", "import login_required from people.models import People import logging logger =", "form.cleaned_data logger.debug(\"lat: \" + str(factory['fact_lat'])) logger.debug(\"addr: \" + factory['fact_addr']) #save", "lat, 'longitude': lng, 'job_num': num, 'factory_id': fact_id, } head =", "str(req) response = urllib2.urlopen(req) #print respone.read() lean_response = fromJSON(response.read()) print", "= lean_response['objectId'] # save in Baidu Map params = urlencode({", "address = fact_info['fact_addr'] lat = fact_info['fact_lat'] lng = fact_info['fact_lng'] num", "HttpResponseRedirect, HttpResponse from django.core.urlresolvers import reverse from django.shortcuts import redirect", "# save in Baidu Map params = urlencode({ 'title': title.encode(\"utf-8\"),", "= People.objects.get(user=request.user) factory = Factory.objects.get(fact_maintainer=people) except ObjectDoesNotExist: print 'no hire", "urlopen, urlencode import urllib2 from fuzzybee.conf import b_url, b_ak, geo_table,", "{'info':info}) @login_required def manager(request): print \"manager...\" try: people = People.objects.get(user=request.user)", "HttpResponse from django.core.urlresolvers import reverse from django.shortcuts import redirect from", "and baidu save_factory_cloud(factory, factid) return HttpResponseRedirect(reverse('board:detail', args=(factid,))) else: form =", "address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'coord_type': 3, 'geotable_id': geo_table, 'ak':", "info = get_object_or_404(Factory, pk=fact_id) return render(request, 'board/detail.html', {'info':info}) @login_required def", "import toJSON, fromJSON from django.contrib.auth.decorators import login_required from people.models import", "'POST': form = FactoryForm(request.POST) print form if 
form.is_valid(): factory =", "redirect from joboard.models import Factory from joboard.forms import FactoryForm from", "toJSON(data), head) print str(req) response = urllib2.urlopen(req) #print respone.read() lean_response", "'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'job_num': num,", "django.core.urlresolvers import reverse from django.shortcuts import redirect from joboard.models import", "try: people = People.objects.get(user=request.user) factory = Factory.objects.get(fact_maintainer=people) except ObjectDoesNotExist: print", "= { 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng,", "HttpResponseRedirect(reverse('board:detail', args=(factid,))) else: form = FactoryForm() return render_to_response('board/new.html', {'form': form},", "people = People.objects.get(user=request.user) factory = Factory.objects.get(fact_maintainer=people) except ObjectDoesNotExist: print 'no", "django.shortcuts import redirect from joboard.models import Factory from joboard.forms import", "title = fact_info['fact_name'] address = fact_info['fact_addr'] lat = fact_info['fact_lat'] lng", "import urlopen, urlencode import urllib2 from fuzzybee.conf import b_url, b_ak,", "Baidu Map params = urlencode({ 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude':", "joboard.models import Factory from joboard.forms import FactoryForm from django.template import", "= FactoryForm() return render_to_response('board/new.html', {'form': form}, context_instance=RequestContext(request)) @login_required def detail(request,", "'coord_type': 3, 'geotable_id': geo_table, 'ak': b_ak, 'job_num': num, 'lean_id': lean_objectId,", "num = fact_info['hire_num'] data = { 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"),", "+ factory['fact_addr']) #save factory in model factmodel = form.save(commit=False) print", "args=[])) return render(request, 
'board/manager.html', {'info':factory}) def save_factory_cloud(fact_info, fact_id): title =", "'longitude': lng, 'coord_type': 3, 'geotable_id': geo_table, 'ak': b_ak, 'job_num': num,", "fact_id): title = fact_info['fact_name'] address = fact_info['fact_addr'] lat = fact_info['fact_lat']", "from people.models import People import logging logger = logging.getLogger(__name__) @login_required", "from django.shortcuts import get_object_or_404, render_to_response, render from django.http import HttpResponseRedirect,", "import urllib2 from fuzzybee.conf import b_url, b_ak, geo_table, l_url, app_id,", "get_object_or_404(Factory, pk=fact_id) return render(request, 'board/detail.html', {'info':info}) @login_required def manager(request): print", "django.contrib.auth.decorators import login_required from people.models import People import logging logger", "return redirect(reverse('joboard.views.index', args=[])) return render(request, 'board/manager.html', {'info':factory}) def save_factory_cloud(fact_info, fact_id):", "def save_factory_cloud(fact_info, fact_id): title = fact_info['fact_name'] address = fact_info['fact_addr'] lat", "def manager(request): print \"manager...\" try: people = People.objects.get(user=request.user) factory =", "model factmodel = form.save(commit=False) print request.user factmodel.fact_maintainer = People.objects.get(user=request.user) factmodel.save()", "logger = logging.getLogger(__name__) @login_required def index(request): form = None if", "{'form': form}, context_instance=RequestContext(request)) @login_required def detail(request, fact_id): print fact_id info", "lat = fact_info['fact_lat'] lng = fact_info['fact_lng'] num = fact_info['hire_num'] data", "lean_response lean_objectId = lean_response['objectId'] # save in Baidu Map params", "geo_table, 'ak': b_ak, 'job_num': num, 'lean_id': lean_objectId, }) req =", "import FactoryForm from django.template import RequestContext from django.core.exceptions import ObjectDoesNotExist", "from 
joboard.forms import FactoryForm from django.template import RequestContext from django.core.exceptions", "public server: leancloud and baidu save_factory_cloud(factory, factid) return HttpResponseRedirect(reverse('board:detail', args=(factid,)))", "coding: utf-8 -*- from django.shortcuts import get_object_or_404, render_to_response, render from", "str(factory['fact_lat'])) logger.debug(\"addr: \" + factory['fact_addr']) #save factory in model factmodel", "factory['fact_addr']) #save factory in model factmodel = form.save(commit=False) print request.user", "form = FactoryForm() return render_to_response('board/new.html', {'form': form}, context_instance=RequestContext(request)) @login_required def", "'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'job_num': num, 'factory_id': fact_id,", "import b_url, b_ak, geo_table, l_url, app_id, app_key from utils.pack_json import", "fact_info['fact_lng'] num = fact_info['hire_num'] data = { 'title': title.encode(\"utf-8\"), 'address':", "-*- from django.shortcuts import get_object_or_404, render_to_response, render from django.http import", "print \"manager...\" try: people = People.objects.get(user=request.user) factory = Factory.objects.get(fact_maintainer=people) except", "app_id, 'X-AVOSCloud-Application-Key': app_key, 'Content-Type': 'application/json', } req = urllib2.Request(l_url, toJSON(data),", "'geotable_id': geo_table, 'ak': b_ak, 'job_num': num, 'lean_id': lean_objectId, }) req", "factmodel = form.save(commit=False) print request.user factmodel.fact_maintainer = People.objects.get(user=request.user) factmodel.save() factid", "reverse from django.shortcuts import redirect from joboard.models import Factory from", "head = { 'X-AVOSCloud-Application-Id': app_id, 'X-AVOSCloud-Application-Key': app_key, 'Content-Type': 'application/json', }", "== 'POST': form = FactoryForm(request.POST) print form if form.is_valid(): factory", "render(request, 'board/manager.html', {'info':factory}) def 
save_factory_cloud(fact_info, fact_id): title = fact_info['fact_name'] address", "lean_response = fromJSON(response.read()) print lean_response lean_objectId = lean_response['objectId'] # save", "title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'coord_type': 3, 'geotable_id':", "people.models import People import logging logger = logging.getLogger(__name__) @login_required def", "'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'coord_type': 3,", "index(request): form = None if request.method == 'POST': form =", "'Content-Type': 'application/json', } req = urllib2.Request(l_url, toJSON(data), head) print str(req)", "from django.core.exceptions import ObjectDoesNotExist from urllib import urlopen, urlencode import", "from django.shortcuts import redirect from joboard.models import Factory from joboard.forms", "num, 'lean_id': lean_objectId, }) req = urllib2.Request(b_url, params) #print str(req)", "form = FactoryForm(request.POST) print form if form.is_valid(): factory = form.cleaned_data", "fact_info['fact_name'] address = fact_info['fact_addr'] lat = fact_info['fact_lat'] lng = fact_info['fact_lng']", "logger.debug(\"addr: \" + factory['fact_addr']) #save factory in model factmodel =", "from joboard.models import Factory from joboard.forms import FactoryForm from django.template", "-*- coding: utf-8 -*- from django.shortcuts import get_object_or_404, render_to_response, render", "= { 'X-AVOSCloud-Application-Id': app_id, 'X-AVOSCloud-Application-Key': app_key, 'Content-Type': 'application/json', } req", "fuzzybee.conf import b_url, b_ak, geo_table, l_url, app_id, app_key from utils.pack_json", "pk=fact_id) return render(request, 'board/detail.html', {'info':info}) @login_required def manager(request): print \"manager...\"", "print request.user factmodel.fact_maintainer = People.objects.get(user=request.user) factmodel.save() factid = factmodel.id #save", 
"lean_objectId = lean_response['objectId'] # save in Baidu Map params =", "lng, 'job_num': num, 'factory_id': fact_id, } head = { 'X-AVOSCloud-Application-Id':", "# -*- coding: utf-8 -*- from django.shortcuts import get_object_or_404, render_to_response,", "req = urllib2.Request(b_url, params) #print str(req) response = urllib2.urlopen(req) #print", "logging logger = logging.getLogger(__name__) @login_required def index(request): form = None", "= urlencode({ 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng,", "+ str(factory['fact_lat'])) logger.debug(\"addr: \" + factory['fact_addr']) #save factory in model", "= fact_info['fact_lat'] lng = fact_info['fact_lng'] num = fact_info['hire_num'] data =", "factmodel.save() factid = factmodel.id #save in public server: leancloud and", "#print respone.read() lean_response = fromJSON(response.read()) print lean_response lean_objectId = lean_response['objectId']", "urllib2 from fuzzybee.conf import b_url, b_ak, geo_table, l_url, app_id, app_key", "in model factmodel = form.save(commit=False) print request.user factmodel.fact_maintainer = People.objects.get(user=request.user)", "= factmodel.id #save in public server: leancloud and baidu save_factory_cloud(factory,", "import get_object_or_404, render_to_response, render from django.http import HttpResponseRedirect, HttpResponse from", "factmodel.id #save in public server: leancloud and baidu save_factory_cloud(factory, factid)", "form.is_valid(): factory = form.cleaned_data logger.debug(\"lat: \" + str(factory['fact_lat'])) logger.debug(\"addr: \"", "render_to_response('board/new.html', {'form': form}, context_instance=RequestContext(request)) @login_required def detail(request, fact_id): print fact_id", "fact_info['fact_lat'] lng = fact_info['fact_lng'] num = fact_info['hire_num'] data = {", "FactoryForm from django.template import RequestContext from django.core.exceptions import ObjectDoesNotExist from", "lng = 
fact_info['fact_lng'] num = fact_info['hire_num'] data = { 'title':", "'X-AVOSCloud-Application-Id': app_id, 'X-AVOSCloud-Application-Key': app_key, 'Content-Type': 'application/json', } req = urllib2.Request(l_url,", "factory = form.cleaned_data logger.debug(\"lat: \" + str(factory['fact_lat'])) logger.debug(\"addr: \" +", "'latitude': lat, 'longitude': lng, 'coord_type': 3, 'geotable_id': geo_table, 'ak': b_ak,", "#save in public server: leancloud and baidu save_factory_cloud(factory, factid) return", "print form if form.is_valid(): factory = form.cleaned_data logger.debug(\"lat: \" +", "lng, 'coord_type': 3, 'geotable_id': geo_table, 'ak': b_ak, 'job_num': num, 'lean_id':", "server: leancloud and baidu save_factory_cloud(factory, factid) return HttpResponseRedirect(reverse('board:detail', args=(factid,))) else:", "req = urllib2.Request(l_url, toJSON(data), head) print str(req) response = urllib2.urlopen(req)", "in Baidu Map params = urlencode({ 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"),", "def index(request): form = None if request.method == 'POST': form", "b_url, b_ak, geo_table, l_url, app_id, app_key from utils.pack_json import toJSON,", "factmodel.fact_maintainer = People.objects.get(user=request.user) factmodel.save() factid = factmodel.id #save in public", "save_factory_cloud(fact_info, fact_id): title = fact_info['fact_name'] address = fact_info['fact_addr'] lat =", "toJSON, fromJSON from django.contrib.auth.decorators import login_required from people.models import People", "urlencode({ 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat, 'longitude': lng, 'coord_type':", "Factory.objects.get(fact_maintainer=people) except ObjectDoesNotExist: print 'no hire action...' 
return redirect(reverse('joboard.views.index', args=[]))", "'board/manager.html', {'info':factory}) def save_factory_cloud(fact_info, fact_id): title = fact_info['fact_name'] address =", "utils.pack_json import toJSON, fromJSON from django.contrib.auth.decorators import login_required from people.models", "Factory from joboard.forms import FactoryForm from django.template import RequestContext from", "fact_id info = get_object_or_404(Factory, pk=fact_id) return render(request, 'board/detail.html', {'info':info}) @login_required", "import People import logging logger = logging.getLogger(__name__) @login_required def index(request):", "render(request, 'board/detail.html', {'info':info}) @login_required def manager(request): print \"manager...\" try: people", "} head = { 'X-AVOSCloud-Application-Id': app_id, 'X-AVOSCloud-Application-Key': app_key, 'Content-Type': 'application/json',", "else: form = FactoryForm() return render_to_response('board/new.html', {'form': form}, context_instance=RequestContext(request)) @login_required", "People import logging logger = logging.getLogger(__name__) @login_required def index(request): form", "FactoryForm() return render_to_response('board/new.html', {'form': form}, context_instance=RequestContext(request)) @login_required def detail(request, fact_id):", "lean_objectId, }) req = urllib2.Request(b_url, params) #print str(req) response =", "app_id, app_key from utils.pack_json import toJSON, fromJSON from django.contrib.auth.decorators import", "import reverse from django.shortcuts import redirect from joboard.models import Factory", "factory in model factmodel = form.save(commit=False) print request.user factmodel.fact_maintainer =", "fromJSON from django.contrib.auth.decorators import login_required from people.models import People import", "'lean_id': lean_objectId, }) req = urllib2.Request(b_url, params) #print str(req) response", "from urllib import urlopen, urlencode import urllib2 from fuzzybee.conf import", "factid) return 
HttpResponseRedirect(reverse('board:detail', args=(factid,))) else: form = FactoryForm() return render_to_response('board/new.html',", "3, 'geotable_id': geo_table, 'ak': b_ak, 'job_num': num, 'lean_id': lean_objectId, })", "leancloud and baidu save_factory_cloud(factory, factid) return HttpResponseRedirect(reverse('board:detail', args=(factid,))) else: form", "fact_info['hire_num'] data = { 'title': title.encode(\"utf-8\"), 'address': address.encode(\"utf-8\"), 'latitude': lat,", "form if form.is_valid(): factory = form.cleaned_data logger.debug(\"lat: \" + str(factory['fact_lat']))", "response = urllib2.urlopen(req) #print respone.read() lean_response = fromJSON(response.read()) print lean_response" ]
[ "from schema import Schema def create_app(**kwargs): app = Flask(__name__) app.debug", ") return app if __name__ == '__main__': app = create_app(graphiql=True)", "def create_app(**kwargs): app = Flask(__name__) app.debug = True app.add_url_rule( '/graphql',", "flask import Flask from flask_cors import CORS from flask_graphql import", "import CORS from flask_graphql import GraphQLView from schema import Schema", "import Flask from flask_cors import CORS from flask_graphql import GraphQLView", "app.debug = True app.add_url_rule( '/graphql', view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs) ) return", "flask_cors import CORS from flask_graphql import GraphQLView from schema import", "**kwargs) ) return app if __name__ == '__main__': app =", "from flask import Flask from flask_cors import CORS from flask_graphql", "from flask_cors import CORS from flask_graphql import GraphQLView from schema", "import GraphQLView from schema import Schema def create_app(**kwargs): app =", "app = Flask(__name__) app.debug = True app.add_url_rule( '/graphql', view_func=GraphQLView.as_view('graphql', schema=Schema,", "Flask(__name__) app.debug = True app.add_url_rule( '/graphql', view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs) )", "= Flask(__name__) app.debug = True app.add_url_rule( '/graphql', view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs)", "'/graphql', view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs) ) return app if __name__ ==", "CORS from flask_graphql import GraphQLView from schema import Schema def", "Schema def create_app(**kwargs): app = Flask(__name__) app.debug = True app.add_url_rule(", "schema import Schema def create_app(**kwargs): app = Flask(__name__) app.debug =", "True app.add_url_rule( '/graphql', view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs) ) return app if", "from flask_graphql import GraphQLView from schema import Schema def create_app(**kwargs):", "schema=Schema, **kwargs) ) 
return app if __name__ == '__main__': app", "return app if __name__ == '__main__': app = create_app(graphiql=True) CORS(app,", "__name__ == '__main__': app = create_app(graphiql=True) CORS(app, resources={r'/graphql': {'origins': '*'}})", "app.add_url_rule( '/graphql', view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs) ) return app if __name__", "Flask from flask_cors import CORS from flask_graphql import GraphQLView from", "app if __name__ == '__main__': app = create_app(graphiql=True) CORS(app, resources={r'/graphql':", "= True app.add_url_rule( '/graphql', view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs) ) return app", "flask_graphql import GraphQLView from schema import Schema def create_app(**kwargs): app", "== '__main__': app = create_app(graphiql=True) CORS(app, resources={r'/graphql': {'origins': '*'}}) app.run()", "import Schema def create_app(**kwargs): app = Flask(__name__) app.debug = True", "GraphQLView from schema import Schema def create_app(**kwargs): app = Flask(__name__)", "view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs) ) return app if __name__ == '__main__':", "if __name__ == '__main__': app = create_app(graphiql=True) CORS(app, resources={r'/graphql': {'origins':", "create_app(**kwargs): app = Flask(__name__) app.debug = True app.add_url_rule( '/graphql', view_func=GraphQLView.as_view('graphql'," ]
[ "dict( name = 'mortgage', label = 'Mortgage', mortgageRate = '4.5%',", "# Other fees as a percentage of the loan otherPurchaseFees", ", # Mortgage length (in years) downPayment = '20%' ,", "tax is not applied ) self.optionList['rentalDefaults'] = dict( rentalPayment =", "inflationRate = '1.8%', # Annual rate of inflation - NOT", "Other fees as a percentage of the loan otherPurchaseFees =", "home value ) self.optionList['investmentPropertyDefaults'] = dict( mortgageRate = '4.5%', #", "cap gains tax is not applied rentalIncome = '0.6%', #", "a percentage of the loan otherPurchaseFees = '0.5%', # Other", "taxes as percentage of home value insuranceRate = '0.4%', #", "'1.8%', # Annual rate of inflation - NOT IMPLEMENTED appreciationRate", "Mortgage length (in years) downPayment = '0%' , # Percentage", "of savings inflationRate = '1.8%', # Annual rate of inflation", "# Cost of selling the house capitalGainsTax = '0.0%', #", "rate of increase in value of house houseValue = '100%',", "# Paid if selling house within two years capitalGainsPeriod =", "selling house within two years capitalGainsPeriod = '0' , #", "downPayment = '0%' , # Percentage of house cost paid", "Paid if selling house within two years capitalGainsPeriod = '2'", "= '2' , # Years after which cap gains tax", "= '0.0%', # Mortgage annual interest rate mortgageLength = '30Y'", "which cap gains tax is not applied ) self.optionList['rentalDefaults'] =", "not applied ) self.optionList['rentalDefaults'] = dict( rentalPayment = '0.6%', #", "dict( rentalPayment = '0.6%', # Monthly rental price as percentage", "'0.5%', # Other fees as a percentage of the loan", "percentage of home value ) self.optionList['mortgageDefaults'] = dict( name =", "'20%' , # Percentage of house cost paid upfront startingCash", "value ) def set_kind_options(self,kind,**inputOptions): self.options = self.optionList['commonDefaults'] if kind ==", "= '4.5%', # Mortgage annual interest rate mortgageLength = '30Y'", "Mortgage annual interest 
rate mortgageLength = '30Y' , # Mortgage", "for key,val in self.optionList['mortgageDefaults'].items(): self.options[key] = val elif kind ==", "percentage of home value insuranceRate = '0.4%', # Annual insurance", "'0.0%', # Paid if selling house within two years capitalGainsPeriod", "rental price as percentage of home value rentalPayment = '0.0%',", "paying for the house mortgageRate = '0.0%', # Mortgage annual", "percentage of home value rentalPayment = '0.0%', # Monthly rental", "# Number of mortgage payments per year taxRate = '0.6%',", "price as percentage of home value ) self.optionList['mortgageDefaults'] = dict(", "'0.4%', # Annual insurance as percentage of home value listingFee", "name = None , label = None , color =", "you bought it originationFees = '0.5%', # Mortgage fees as", "pass elif kind == 'mortgage': for key,val in self.optionList['mortgageDefaults'].items(): self.options[key]", "kind) self.set_input_options(**inputOptions) def set_default_options(self): self.optionList = dict() self.optionList['commonDefaults'] = dict(", "# Years after which cap gains tax is not applied", "Factory-like class for mortgage options class MortgageOptions: def __init__(self,kind,**inputOptions): self.set_default_options()", "# Mortgage fees as a percentage of the loan otherMortgageFees", "of home value insuranceRate = '0.4%', # Annual insurance as", "Years after which cap gains tax is not applied rentalIncome", "elif kind == 'rental': for key,val in self.optionList['rentalDefaults'].items(): self.options[key] =", "length (in years) downPayment = '20%' , # Percentage of", "== None: pass elif kind == 'mortgage': for key,val in", "'investmentProperty': for key,val in self.optionList['investmentPropertyDefaults'].items(): self.options[key] = val def set_input_options(self,**inputOptions):", "is worth when you bought it originationFees = '0.5%', #", "a percentage of the loan otherMortgageFees = '0.0%', # Other", "= '0.5%', # Other fees as a percentage of home", "= dict() 
self.optionList['commonDefaults'] = dict( name = None , label", "insurance as percentage of home value listingFee = '0.0%', #", "= '0.6%', # Monthly rental price as percentage of home", "otherMortgageFees = '0.5%', # Other fees as a percentage of", "rate of return of savings inflationRate = '1.8%', # Annual", "years) downPayment = '20%' , # Percentage of house cost", "money you have before purchase tvmRate = '7.0%', # Annual", "(in years) downPayment = '0%' , # Percentage of house", "fees as a percentage of the loan otherMortgageFees = '0.5%',", "of increase in value of house houseValue = '100%', #", "'30Y' , # Mortgage length (in years) downPayment = '0%'", "value listingFee = '0.0%', # Cost of selling the house", "Monthly rental price as percentage of home value ) def", "value paymentsPerYear = '12' , # Number of mortgage payments", "have before purchase tvmRate = '7.0%', # Annual rate of", "you have before purchase tvmRate = '7.0%', # Annual rate", "paymentsPerYear = '12' , # Number of mortgage payments per", "within two years capitalGainsPeriod = '0' , # Years after", "# Annual taxes as percentage of home value insuranceRate =", "selling the house capitalGainsTax = '0.0%', # Paid if selling", "= dict( name = 'mortgage', label = 'Mortgage', mortgageRate =", "paid upfront startingCash = '100%', # Amount of money you", "fees as a percentage of the loan otherMortgageFees = '0.0%',", "insurance as percentage of home value listingFee = '6.0%', #", "years) downPayment = '0%' , # Percentage of house cost", "loan otherPurchaseFees = '0.5%', # Other fees as a percentage", "for key,val in self.optionList['rentalDefaults'].items(): self.options[key] = val elif kind ==", "percentage of home value listingFee = '6.0%', # Cost of", "of mortgage payments per year taxRate = '0.0%', # Annual", "dict( name = None , label = None , color", "mortgageRate = '0.0%', # Mortgage annual interest rate mortgageLength =", "as percentage of home value insuranceRate = '0.4%', # Annual", 
"percentage of home value ) def set_kind_options(self,kind,**inputOptions): self.options = self.optionList['commonDefaults']", "as percentage of home value listingFee = '0.0%', # Cost", "per year taxRate = '0.0%', # Annual taxes as percentage", "of money you have before purchase originationFees = '0.5%', #", "capitalGainsTax = '0.0%', # Paid if selling house within two", "'7.0%', # Annual rate of return of savings inflationRate =", "val elif kind == 'investmentProperty': for key,val in self.optionList['investmentPropertyDefaults'].items(): self.options[key]", "self.optionList['rentalDefaults'].items(): self.options[key] = val elif kind == 'investmentProperty': for key,val", ") def set_kind_options(self,kind,**inputOptions): self.options = self.optionList['commonDefaults'] if kind == None:", "home value insuranceRate = '0.0%', # Annual insurance as percentage", "in self.optionList['investmentPropertyDefaults'].items(): self.options[key] = val def set_input_options(self,**inputOptions): for key,val in", "# Monthly rental price as percentage of home value )", "Monthly rental price as percentage of home value ) self.optionList['investmentPropertyDefaults']", "inflation - NOT IMPLEMENTED appreciationRate = '5.0%', # Annual rate", ", label = None , color = [0,0,0], houseCost =", "payments per year taxRate = '0.6%', # Annual taxes as", "are paying for the house mortgageRate = '0.0%', # Mortgage", "bought it originationFees = '0.5%', # Mortgage fees as a", "originationFees = '0.5%', # Mortgage fees as a percentage of", "Number of mortgage payments per year taxRate = '0.0%', #", "= '15%' , # Paid if selling house within two", "= None , color = [0,0,0], houseCost = '100%', #", "set_kind_options(self,kind,**inputOptions): self.options = self.optionList['commonDefaults'] if kind == None: pass elif", "rentalPayment = '0.0%', # Monthly rental price as percentage of", "'4.5%', # Mortgage annual interest rate mortgageLength = '30Y' ,", "# how much the house is worth when you 
bought", "much the house is worth when you bought it originationFees", "value ) self.optionList['investmentPropertyDefaults'] = dict( mortgageRate = '4.5%', # Mortgage", "elif kind == 'mortgage': for key,val in self.optionList['mortgageDefaults'].items(): self.options[key] =", "value insuranceRate = '0.0%', # Annual insurance as percentage of", "as a percentage of the loan otherPurchaseFees = '0.5%', #", "= '7.0%', # Annual rate of return of savings inflationRate", "Annual insurance as percentage of home value listingFee = '6.0%',", "'6.0%', # Cost of selling the house capitalGainsTax = '15%'", "as percentage of home value ) self.optionList['investmentPropertyDefaults'] = dict( mortgageRate", "def set_default_options(self): self.optionList = dict() self.optionList['commonDefaults'] = dict( name =", "== 'mortgage': for key,val in self.optionList['mortgageDefaults'].items(): self.options[key] = val elif", "taxRate = '0.6%', # Annual taxes as percentage of home", ") self.optionList['investmentPropertyDefaults'] = dict( mortgageRate = '4.5%', # Mortgage annual", "as percentage of home value ) self.optionList['mortgageDefaults'] = dict( name", "self.set_default_options() self.set_kind_options(kind = kind) self.set_input_options(**inputOptions) def set_default_options(self): self.optionList = dict()", "= '100%', # Amount of money you have before purchase", "percentage of home value paymentsPerYear = '12' , # Number", "Paid if selling house within two years capitalGainsPeriod = '0'", "which cap gains tax is not applied rentalIncome = '0.0%',", "Years after which cap gains tax is not applied )", "= [0,0,0], houseCost = '100%', # how much you are", "if selling house within two years capitalGainsPeriod = '0' ,", "of the loan otherMortgageFees = '0.0%', # Other fees as", "you are paying for the house mortgageRate = '0.0%', #", "rate of inflation - NOT IMPLEMENTED appreciationRate = '5.0%', #", "tax is not applied rentalIncome = '0.6%', # Monthly rental", "self.options[key] = 
val def set_input_options(self,**inputOptions): for key,val in inputOptions.items(): self.options[key]", "'0%' , # Percentage of house cost paid upfront startingCash", "key,val in self.optionList['rentalDefaults'].items(): self.options[key] = val elif kind == 'investmentProperty':", "downPayment = '20%' , # Percentage of house cost paid", "houseCost = '100%', # how much you are paying for", "rentalIncome = '0.0%', # Monthly rental price as percentage of", "is not applied rentalIncome = '0.6%', # Monthly rental price", "not applied rentalIncome = '0.0%', # Monthly rental price as", "percentage of home value insuranceRate = '0.0%', # Annual insurance", "originationFees = '0.0%', # Mortgage fees as a percentage of", "# Amount of money you have before purchase tvmRate =", "as percentage of home value insuranceRate = '0.0%', # Annual", "interest rate mortgageLength = '30Y' , # Mortgage length (in", "house capitalGainsTax = '15%' , # Paid if selling house", "you have before purchase originationFees = '0.5%', # Mortgage fees", "'0.0%', # Other fees as a percentage of home value", "= '0' , # Years after which cap gains tax", "taxes as percentage of home value insuranceRate = '0.0%', #", "# Other fees as a percentage of home value paymentsPerYear", "= '0.0%', # Other fees as a percentage of the", "two years capitalGainsPeriod = '2' , # Years after which", "house mortgageRate = '0.0%', # Mortgage annual interest rate mortgageLength", ") self.optionList['rentalDefaults'] = dict( rentalPayment = '0.6%', # Monthly rental", "= kind) self.set_input_options(**inputOptions) def set_default_options(self): self.optionList = dict() self.optionList['commonDefaults'] =", "length (in years) downPayment = '0%' , # Percentage of", "Annual taxes as percentage of home value insuranceRate = '0.0%',", "# Annual rate of return of savings inflationRate = '1.8%',", "rate mortgageLength = '30Y' , # Mortgage length (in years)", "percentage of the loan otherMortgageFees = '0.5%', # Other fees", "of 
home value listingFee = '6.0%', # Cost of selling", "for mortgage options class MortgageOptions: def __init__(self,kind,**inputOptions): self.set_default_options() self.set_kind_options(kind =", "'mortgage', label = 'Mortgage', mortgageRate = '4.5%', # Mortgage annual", "houseValue = '100%', # how much the house is worth", "house houseValue = '100%', # how much the house is", "before purchase originationFees = '0.5%', # Mortgage fees as a", "'0.6%', # Monthly rental price as percentage of home value", "of home value paymentsPerYear = '12' , # Number of", "listingFee = '0.0%', # Cost of selling the house capitalGainsTax", "when you bought it originationFees = '0.5%', # Mortgage fees", "Cost of selling the house capitalGainsTax = '15%' , #", "you bought it originationFees = '0.0%', # Mortgage fees as", "'100%', # Amount of money you have before purchase originationFees", "price as percentage of home value ) self.optionList['investmentPropertyDefaults'] = dict(", "self.set_input_options(**inputOptions) def set_default_options(self): self.optionList = dict() self.optionList['commonDefaults'] = dict( name", "for key,val in self.optionList['investmentPropertyDefaults'].items(): self.options[key] = val def set_input_options(self,**inputOptions): for", "house is worth when you bought it originationFees = '0.5%',", "gains tax is not applied rentalIncome = '0.6%', # Monthly", "loan otherMortgageFees = '0.0%', # Other fees as a percentage", "= '0.0%', # Paid if selling house within two years", "capitalGainsPeriod = '0' , # Years after which cap gains", "purchase tvmRate = '7.0%', # Annual rate of return of", "not applied rentalIncome = '0.6%', # Monthly rental price as", "of house houseValue = '100%', # how much the house", "self.optionList['mortgageDefaults'] = dict( name = 'mortgage', label = 'Mortgage', mortgageRate", "cap gains tax is not applied rentalIncome = '0.0%', #", "selling the house capitalGainsTax = '15%' , # Paid if", "of selling the house capitalGainsTax = 
'0.0%', # Paid if", "= '5.0%', # Annual rate of increase in value of", "'2' , # Years after which cap gains tax is", "__init__(self,kind,**inputOptions): self.set_default_options() self.set_kind_options(kind = kind) self.set_input_options(**inputOptions) def set_default_options(self): self.optionList =", "# Number of mortgage payments per year taxRate = '0.0%',", "for the house mortgageRate = '0.0%', # Mortgage annual interest", "home value listingFee = '0.0%', # Cost of selling the", "it originationFees = '0.5%', # Mortgage fees as a percentage", "percentage of the loan otherPurchaseFees = '0.5%', # Other fees", "within two years capitalGainsPeriod = '2' , # Years after", "Annual rate of return of savings inflationRate = '1.8%', #", "selling house within two years capitalGainsPeriod = '2' , #", "'5.0%', # Annual rate of increase in value of house", "= '12' , # Number of mortgage payments per year", "of home value ) self.optionList['investmentPropertyDefaults'] = dict( mortgageRate = '4.5%',", "year taxRate = '0.0%', # Annual taxes as percentage of", "mortgage payments per year taxRate = '0.6%', # Annual taxes", "color = [0,0,0], houseCost = '100%', # how much you", "= '0.0%', # Mortgage fees as a percentage of the", "value of house houseValue = '100%', # how much the", "home value ) def set_kind_options(self,kind,**inputOptions): self.options = self.optionList['commonDefaults'] if kind", "fees as a percentage of home value paymentsPerYear = '12'", "the house is worth when you bought it originationFees =", ", # Years after which cap gains tax is not", "is not applied rentalIncome = '0.0%', # Monthly rental price", "of money you have before purchase tvmRate = '7.0%', #", "otherPurchaseFees = '0.0%', # Other fees as a percentage of", "of home value listingFee = '0.0%', # Cost of selling", "= dict( rentalPayment = '0.6%', # Monthly rental price as", "as percentage of home value rentalPayment = '0.0%', # Monthly", "Cost of selling the house capitalGainsTax = '0.0%', # 
Paid", "'100%', # how much the house is worth when you", "rental price as percentage of home value ) self.optionList['mortgageDefaults'] =", "'mortgage': for key,val in self.optionList['mortgageDefaults'].items(): self.options[key] = val elif kind", "= '0.6%', # Annual taxes as percentage of home value", "self.optionList['rentalDefaults'] = dict( rentalPayment = '0.6%', # Monthly rental price", "kind == 'investmentProperty': for key,val in self.optionList['investmentPropertyDefaults'].items(): self.options[key] = val", "# Annual insurance as percentage of home value listingFee =", "'0.0%', # Cost of selling the house capitalGainsTax = '0.0%',", "kind == 'mortgage': for key,val in self.optionList['mortgageDefaults'].items(): self.options[key] = val", "capitalGainsTax = '15%' , # Paid if selling house within", "self.options[key] = val elif kind == 'investmentProperty': for key,val in", "elif kind == 'investmentProperty': for key,val in self.optionList['investmentPropertyDefaults'].items(): self.options[key] =", "mortgage options class MortgageOptions: def __init__(self,kind,**inputOptions): self.set_default_options() self.set_kind_options(kind = kind)", "class for mortgage options class MortgageOptions: def __init__(self,kind,**inputOptions): self.set_default_options() self.set_kind_options(kind", "Monthly rental price as percentage of home value ) self.optionList['mortgageDefaults']", "set_default_options(self): self.optionList = dict() self.optionList['commonDefaults'] = dict( name = None", "applied rentalIncome = '0.6%', # Monthly rental price as percentage", "as a percentage of the loan otherPurchaseFees = '0.0%', #", "loan otherMortgageFees = '0.5%', # Other fees as a percentage", "val def set_input_options(self,**inputOptions): for key,val in inputOptions.items(): self.options[key] = val", "as a percentage of home value paymentsPerYear = '12' ,", "home value listingFee = '6.0%', # Cost of selling the", "value listingFee = '6.0%', # Cost of selling the house", 
"have before purchase originationFees = '0.5%', # Mortgage fees as", "in self.optionList['rentalDefaults'].items(): self.options[key] = val elif kind == 'investmentProperty': for", "'0.0%', # Annual taxes as percentage of home value insuranceRate", "== 'rental': for key,val in self.optionList['rentalDefaults'].items(): self.options[key] = val elif", "tax is not applied rentalIncome = '0.0%', # Monthly rental", "Percentage of house cost paid upfront startingCash = '100%', #", ", # Number of mortgage payments per year taxRate =", "of the loan otherMortgageFees = '0.5%', # Other fees as", "= val elif kind == 'rental': for key,val in self.optionList['rentalDefaults'].items():", "# Mortgage annual interest rate mortgageLength = '30Y' , #", "= '0.5%', # Mortgage fees as a percentage of the", "Annual insurance as percentage of home value listingFee = '0.0%',", "key,val in self.optionList['mortgageDefaults'].items(): self.options[key] = val elif kind == 'rental':", "increase in value of house houseValue = '100%', # how", "per year taxRate = '0.6%', # Annual taxes as percentage", "value insuranceRate = '0.4%', # Annual insurance as percentage of", "how much you are paying for the house mortgageRate =", "self.optionList = dict() self.optionList['commonDefaults'] = dict( name = None ,", "rental price as percentage of home value ) def set_kind_options(self,kind,**inputOptions):", "# Amount of money you have before purchase originationFees =", "rentalPayment = '0.6%', # Monthly rental price as percentage of", "Amount of money you have before purchase originationFees = '0.5%',", "house is worth when you bought it originationFees = '0.0%',", "home value paymentsPerYear = '12' , # Number of mortgage", "house within two years capitalGainsPeriod = '0' , # Years", "percentage of the loan otherPurchaseFees = '0.0%', # Other fees", "Annual rate of inflation - NOT IMPLEMENTED appreciationRate = '5.0%',", "# how much you are paying for the house mortgageRate", "bought it originationFees 
= '0.0%', # Mortgage fees as a", "applied rentalIncome = '0.0%', # Monthly rental price as percentage", "self.optionList['mortgageDefaults'].items(): self.options[key] = val elif kind == 'rental': for key,val", "dict() self.optionList['commonDefaults'] = dict( name = None , label =", "upfront startingCash = '100%', # Amount of money you have", "self.set_kind_options(kind = kind) self.set_input_options(**inputOptions) def set_default_options(self): self.optionList = dict() self.optionList['commonDefaults']", "home value insuranceRate = '0.4%', # Annual insurance as percentage", "of inflation - NOT IMPLEMENTED appreciationRate = '5.0%', # Annual", "insuranceRate = '0.4%', # Annual insurance as percentage of home", "self.optionList['investmentPropertyDefaults'] = dict( mortgageRate = '4.5%', # Mortgage annual interest", "label = 'Mortgage', mortgageRate = '4.5%', # Mortgage annual interest", "# Monthly rental price as percentage of home value rentalPayment", "startingCash = '100%', # Amount of money you have before", "# Annual rate of inflation - NOT IMPLEMENTED appreciationRate =", "rental price as percentage of home value ) self.optionList['investmentPropertyDefaults'] =", "of selling the house capitalGainsTax = '15%' , # Paid", "a percentage of the loan otherMortgageFees = '0.5%', # Other", "after which cap gains tax is not applied ) self.optionList['rentalDefaults']", "fees as a percentage of the loan otherPurchaseFees = '0.0%',", "percentage of the loan otherMortgageFees = '0.0%', # Other fees", "price as percentage of home value rentalPayment = '0.0%', #", "# Percentage of house cost paid upfront startingCash = '100%',", "[0,0,0], houseCost = '100%', # how much you are paying", "'0.0%', # Other fees as a percentage of the loan", "year taxRate = '0.6%', # Annual taxes as percentage of", "== 'investmentProperty': for key,val in self.optionList['investmentPropertyDefaults'].items(): self.options[key] = val def", "savings inflationRate = '1.8%', # Annual rate of 
inflation -", "when you bought it originationFees = '0.0%', # Mortgage fees", "otherMortgageFees = '0.0%', # Other fees as a percentage of", "before purchase tvmRate = '7.0%', # Annual rate of return", "'0.0%', # Annual insurance as percentage of home value listingFee", "after which cap gains tax is not applied rentalIncome =", "= 'Mortgage', mortgageRate = '4.5%', # Mortgage annual interest rate", "a percentage of home value paymentsPerYear = '12' , #", "kind == None: pass elif kind == 'mortgage': for key,val", "home value rentalPayment = '0.0%', # Monthly rental price as", ", # Paid if selling house within two years capitalGainsPeriod", "= val elif kind == 'investmentProperty': for key,val in self.optionList['investmentPropertyDefaults'].items():", "capitalGainsPeriod = '2' , # Years after which cap gains", "otherPurchaseFees = '0.5%', # Other fees as a percentage of", "purchase originationFees = '0.5%', # Mortgage fees as a percentage", "= '6.0%', # Cost of selling the house capitalGainsTax =", "appreciationRate = '5.0%', # Annual rate of increase in value", "= '0.4%', # Annual insurance as percentage of home value", "price as percentage of home value ) def set_kind_options(self,kind,**inputOptions): self.options", "the house mortgageRate = '0.0%', # Mortgage annual interest rate", "options class MortgageOptions: def __init__(self,kind,**inputOptions): self.set_default_options() self.set_kind_options(kind = kind) self.set_input_options(**inputOptions)", "'30Y' , # Mortgage length (in years) downPayment = '20%'", "worth when you bought it originationFees = '0.0%', # Mortgage", "as a percentage of the loan otherMortgageFees = '0.5%', #", "= '0.0%', # Monthly rental price as percentage of home", "= '0.0%', # Cost of selling the house capitalGainsTax =", "= '0.0%', # Other fees as a percentage of home", "self.optionList['commonDefaults'] = dict( name = None , label = None", "annual interest rate mortgageLength = '30Y' , # Mortgage length", "payments per year taxRate 
= '0.0%', # Annual taxes as", "= '1.8%', # Annual rate of inflation - NOT IMPLEMENTED", "Amount of money you have before purchase tvmRate = '7.0%',", "of the loan otherPurchaseFees = '0.5%', # Other fees as", "as percentage of home value ) def set_kind_options(self,kind,**inputOptions): self.options =", "# Mortgage length (in years) downPayment = '20%' , #", "def __init__(self,kind,**inputOptions): self.set_default_options() self.set_kind_options(kind = kind) self.set_input_options(**inputOptions) def set_default_options(self): self.optionList", "= '20%' , # Percentage of house cost paid upfront", "NOT IMPLEMENTED appreciationRate = '5.0%', # Annual rate of increase", "the loan otherMortgageFees = '0.0%', # Other fees as a", "None: pass elif kind == 'mortgage': for key,val in self.optionList['mortgageDefaults'].items():", "'rental': for key,val in self.optionList['rentalDefaults'].items(): self.options[key] = val elif kind", "'0.0%', # Mortgage fees as a percentage of the loan", "years capitalGainsPeriod = '2' , # Years after which cap", "which cap gains tax is not applied rentalIncome = '0.6%',", "dict( mortgageRate = '4.5%', # Mortgage annual interest rate mortgageLength", "in value of house houseValue = '100%', # how much", "of home value insuranceRate = '0.0%', # Annual insurance as", "MortgageOptions: def __init__(self,kind,**inputOptions): self.set_default_options() self.set_kind_options(kind = kind) self.set_input_options(**inputOptions) def set_default_options(self):", "home value ) self.optionList['mortgageDefaults'] = dict( name = 'mortgage', label", "= '0.0%', # Annual taxes as percentage of home value", "'0.0%', # Monthly rental price as percentage of home value", ") self.optionList['mortgageDefaults'] = dict( name = 'mortgage', label = 'Mortgage',", "cap gains tax is not applied ) self.optionList['rentalDefaults'] = dict(", "class MortgageOptions: def __init__(self,kind,**inputOptions): self.set_default_options() self.set_kind_options(kind = kind) 
self.set_input_options(**inputOptions) def", "applied ) self.optionList['rentalDefaults'] = dict( rentalPayment = '0.6%', # Monthly", "as a percentage of the loan otherMortgageFees = '0.0%', #", "value rentalPayment = '0.0%', # Monthly rental price as percentage", "loan otherPurchaseFees = '0.0%', # Other fees as a percentage", "key,val in self.optionList['investmentPropertyDefaults'].items(): self.options[key] = val def set_input_options(self,**inputOptions): for key,val", "'12' , # Number of mortgage payments per year taxRate", "taxRate = '0.0%', # Annual taxes as percentage of home", "tvmRate = '7.0%', # Annual rate of return of savings", "mortgage payments per year taxRate = '0.0%', # Annual taxes", ", # Mortgage length (in years) downPayment = '0%' ,", "return of savings inflationRate = '1.8%', # Annual rate of", "of house cost paid upfront startingCash = '100%', # Amount", "if selling house within two years capitalGainsPeriod = '2' ,", "the loan otherPurchaseFees = '0.0%', # Other fees as a", "in self.optionList['mortgageDefaults'].items(): self.options[key] = val elif kind == 'rental': for", ", color = [0,0,0], houseCost = '100%', # how much", "house within two years capitalGainsPeriod = '2' , # Years", "insuranceRate = '0.0%', # Annual insurance as percentage of home", "rentalIncome = '0.6%', # Monthly rental price as percentage of", "- NOT IMPLEMENTED appreciationRate = '5.0%', # Annual rate of", "'0.0%', # Mortgage annual interest rate mortgageLength = '30Y' ,", "house capitalGainsTax = '0.0%', # Paid if selling house within", "# Factory-like class for mortgage options class MortgageOptions: def __init__(self,kind,**inputOptions):", "gains tax is not applied rentalIncome = '0.0%', # Monthly", "= '100%', # how much the house is worth when", "None , label = None , color = [0,0,0], houseCost", "listingFee = '6.0%', # Cost of selling the house capitalGainsTax", "'100%', # how much you are paying for the house", "self.optionList['commonDefaults'] if kind == 
None: pass elif kind == 'mortgage':", "the loan otherMortgageFees = '0.5%', # Other fees as a", "it originationFees = '0.0%', # Mortgage fees as a percentage", "= val def set_input_options(self,**inputOptions): for key,val in inputOptions.items(): self.options[key] =", "Number of mortgage payments per year taxRate = '0.6%', #", "is not applied ) self.optionList['rentalDefaults'] = dict( rentalPayment = '0.6%',", "# Cost of selling the house capitalGainsTax = '15%' ,", "label = None , color = [0,0,0], houseCost = '100%',", "two years capitalGainsPeriod = '0' , # Years after which", "the house capitalGainsTax = '15%' , # Paid if selling", "= dict( name = None , label = None ,", "percentage of home value listingFee = '0.0%', # Cost of", "is worth when you bought it originationFees = '0.0%', #", "# Mortgage length (in years) downPayment = '0%' , #", "# Annual rate of increase in value of house houseValue", "Other fees as a percentage of home value paymentsPerYear =", "= self.optionList['commonDefaults'] if kind == None: pass elif kind ==", "of home value rentalPayment = '0.0%', # Monthly rental price", "worth when you bought it originationFees = '0.5%', # Mortgage", "of home value ) def set_kind_options(self,kind,**inputOptions): self.options = self.optionList['commonDefaults'] if", "if kind == None: pass elif kind == 'mortgage': for", "= '0%' , # Percentage of house cost paid upfront", "years capitalGainsPeriod = '0' , # Years after which cap", "= '30Y' , # Mortgage length (in years) downPayment =", "much you are paying for the house mortgageRate = '0.0%',", "cost paid upfront startingCash = '100%', # Amount of money", "'0.5%', # Mortgage fees as a percentage of the loan", "kind == 'rental': for key,val in self.optionList['rentalDefaults'].items(): self.options[key] = val", "'15%' , # Paid if selling house within two years", "self.options[key] = val elif kind == 'rental': for key,val in", "Annual taxes as percentage of home value insuranceRate = '0.4%',", ", # 
Percentage of house cost paid upfront startingCash =", "IMPLEMENTED appreciationRate = '5.0%', # Annual rate of increase in", "of mortgage payments per year taxRate = '0.6%', # Annual", "Monthly rental price as percentage of home value rentalPayment =", "None , color = [0,0,0], houseCost = '100%', # how", "value ) self.optionList['mortgageDefaults'] = dict( name = 'mortgage', label =", "mortgageLength = '30Y' , # Mortgage length (in years) downPayment", "mortgageRate = '4.5%', # Mortgage annual interest rate mortgageLength =", "= '0.5%', # Other fees as a percentage of the", "'0' , # Years after which cap gains tax is", "name = 'mortgage', label = 'Mortgage', mortgageRate = '4.5%', #", "self.optionList['investmentPropertyDefaults'].items(): self.options[key] = val def set_input_options(self,**inputOptions): for key,val in inputOptions.items():", "how much the house is worth when you bought it", "fees as a percentage of the loan otherPurchaseFees = '0.5%',", "self.options = self.optionList['commonDefaults'] if kind == None: pass elif kind", "= None , label = None , color = [0,0,0],", "= '100%', # how much you are paying for the", "as percentage of home value listingFee = '6.0%', # Cost", "'0.6%', # Annual taxes as percentage of home value insuranceRate", "Mortgage fees as a percentage of the loan otherMortgageFees =", "= 'mortgage', label = 'Mortgage', mortgageRate = '4.5%', # Mortgage", "gains tax is not applied ) self.optionList['rentalDefaults'] = dict( rentalPayment", "= dict( mortgageRate = '4.5%', # Mortgage annual interest rate", "Mortgage length (in years) downPayment = '20%' , # Percentage", "(in years) downPayment = '20%' , # Percentage of house", "def set_kind_options(self,kind,**inputOptions): self.options = self.optionList['commonDefaults'] if kind == None: pass", "'100%', # Amount of money you have before purchase tvmRate", "of return of savings inflationRate = '1.8%', # Annual rate", "val elif kind == 'rental': for key,val in 
self.optionList['rentalDefaults'].items(): self.options[key]", "percentage of home value ) self.optionList['investmentPropertyDefaults'] = dict( mortgageRate =", "of the loan otherPurchaseFees = '0.0%', # Other fees as", "money you have before purchase originationFees = '0.5%', # Mortgage", "the loan otherPurchaseFees = '0.5%', # Other fees as a", "'Mortgage', mortgageRate = '4.5%', # Mortgage annual interest rate mortgageLength", "the house capitalGainsTax = '0.0%', # Paid if selling house", "'0.5%', # Other fees as a percentage of home value", "= '0.0%', # Annual insurance as percentage of home value", "house cost paid upfront startingCash = '100%', # Amount of", "of home value ) self.optionList['mortgageDefaults'] = dict( name = 'mortgage',", "a percentage of the loan otherPurchaseFees = '0.0%', # Other", "Annual rate of increase in value of house houseValue =" ]
[ "__init__(self, json, width, height, scale=4, terrain_types=4): super(Terrain, self).__init__(json) self._scale =", "width, height): self.splat = self._rot180_map(self.splat) def rot270(self, width, height): self.splat", "height): self.splat = np.flipud(self.splat) def rot90(self, width, height): self.splat =", "top, bottom, left, right): self.splat = self._crop_map_safe(self.splat, top, bottom, left,", "order='C') def get_json(self): json = self._json json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C'))", "np class Terrain(Entity): def __init__(self, json, width, height, scale=4, terrain_types=4):", "bottom*self._scale), (left*self._scale, right*self._scale), (0,0)), mode='edge') def crop(self, top, bottom, left,", "rot180(self, width, height): self.splat = self._rot180_map(self.splat) def rot270(self, width, height):", "bottom, left, right): self.splat = np.pad(self.splat, ((top*self._scale, bottom*self._scale), (left*self._scale, right*self._scale),", "Terrain(Entity): def __init__(self, json, width, height, scale=4, terrain_types=4): super(Terrain, self).__init__(json)", "np.fliplr(self.splat) def flipud(self, height): self.splat = np.flipud(self.splat) def rot90(self, width,", "json, width, height, scale=4, terrain_types=4): super(Terrain, self).__init__(json) self._scale = scale", "width*self._scale, self.terrain_types, order='C') def get_json(self): json = self._json json['splat'] =", "left, right): self.splat = np.pad(self.splat, ((top*self._scale, bottom*self._scale), (left*self._scale, right*self._scale), (0,0)),", "mode='edge') def crop(self, top, bottom, left, right): self.splat = self._crop_map_safe(self.splat,", "PoolByteArray2NumpyArray, NumpyArray2PoolByteArray from DD.Entity import Entity import numpy as np", "scale=4, terrain_types=4): super(Terrain, self).__init__(json) self._scale = scale self.terrain_types = terrain_types", "return json def pad(self, top, bottom, left, right): self.splat =", 
"np.flipud(self.splat) def rot90(self, width, height): self.splat = self._rot90_map(self.splat) def rot180(self,", "super(Terrain, self).__init__(json) self._scale = scale self.terrain_types = terrain_types self.splat =", "json def pad(self, top, bottom, left, right): self.splat = np.pad(self.splat,", "self.splat = self._rot90_map(self.splat) def rot180(self, width, height): self.splat = self._rot180_map(self.splat)", "NumpyArray2PoolByteArray from DD.Entity import Entity import numpy as np class", "json = self._json json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C')) return json def", "bottom, left, right): self.splat = self._crop_map_safe(self.splat, top, bottom, left, right,", "width): self.splat = np.fliplr(self.splat) def flipud(self, height): self.splat = np.flipud(self.splat)", "flipud(self, height): self.splat = np.flipud(self.splat) def rot90(self, width, height): self.splat", "as np class Terrain(Entity): def __init__(self, json, width, height, scale=4,", "top, bottom, left, right, self._scale) def fliplr(self, width): self.splat =", "self.splat = np.pad(self.splat, ((top*self._scale, bottom*self._scale), (left*self._scale, right*self._scale), (0,0)), mode='edge') def", "right): self.splat = self._crop_map_safe(self.splat, top, bottom, left, right, self._scale) def", "def pad(self, top, bottom, left, right): self.splat = np.pad(self.splat, ((top*self._scale,", "class Terrain(Entity): def __init__(self, json, width, height, scale=4, terrain_types=4): super(Terrain,", "def __init__(self, json, width, height, scale=4, terrain_types=4): super(Terrain, self).__init__(json) self._scale", "(left*self._scale, right*self._scale), (0,0)), mode='edge') def crop(self, top, bottom, left, right):", "def rot180(self, width, height): self.splat = self._rot180_map(self.splat) def rot270(self, width,", "self._json json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C')) return json def 
pad(self, top,", "right, self._scale) def fliplr(self, width): self.splat = np.fliplr(self.splat) def flipud(self,", "DD.utils import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray from DD.Entity import Entity import numpy", "self._scale = scale self.terrain_types = terrain_types self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale,", "= self._rot90_map(self.splat) def rot180(self, width, height): self.splat = self._rot180_map(self.splat) def", "self.terrain_types = terrain_types self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types, order='C') def", "height, scale=4, terrain_types=4): super(Terrain, self).__init__(json) self._scale = scale self.terrain_types =", "terrain_types self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types, order='C') def get_json(self): json", "PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types, order='C') def get_json(self): json = self._json json['splat']", "self._scale) def fliplr(self, width): self.splat = np.fliplr(self.splat) def flipud(self, height):", "import Entity import numpy as np class Terrain(Entity): def __init__(self,", "from DD.Entity import Entity import numpy as np class Terrain(Entity):", "import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray from DD.Entity import Entity import numpy as", "self.terrain_types, order='C') def get_json(self): json = self._json json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape),", "self.splat = np.flipud(self.splat) def rot90(self, width, height): self.splat = self._rot90_map(self.splat)", "self.splat = self._rot180_map(self.splat) def rot270(self, width, height): self.splat = self._rot270_map(self.splat)", "top, bottom, left, right): self.splat = np.pad(self.splat, ((top*self._scale, 
bottom*self._scale), (left*self._scale,", "self._rot90_map(self.splat) def rot180(self, width, height): self.splat = self._rot180_map(self.splat) def rot270(self,", "self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types, order='C') def get_json(self): json =", "(0,0)), mode='edge') def crop(self, top, bottom, left, right): self.splat =", "from DD.utils import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray from DD.Entity import Entity import", "get_json(self): json = self._json json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C')) return json", "height): self.splat = self._rot90_map(self.splat) def rot180(self, width, height): self.splat =", "= np.flipud(self.splat) def rot90(self, width, height): self.splat = self._rot90_map(self.splat) def", "height): self.splat = self._rot180_map(self.splat) def rot270(self, width, height): self.splat =", "import numpy as np class Terrain(Entity): def __init__(self, json, width,", "numpy as np class Terrain(Entity): def __init__(self, json, width, height,", "np.pad(self.splat, ((top*self._scale, bottom*self._scale), (left*self._scale, right*self._scale), (0,0)), mode='edge') def crop(self, top,", "Entity import numpy as np class Terrain(Entity): def __init__(self, json,", "((top*self._scale, bottom*self._scale), (left*self._scale, right*self._scale), (0,0)), mode='edge') def crop(self, top, bottom,", "json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C')) return json def pad(self, top, bottom,", "scale self.terrain_types = terrain_types self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types, order='C')", "def rot90(self, width, height): self.splat = self._rot90_map(self.splat) def rot180(self, width,", "right*self._scale), (0,0)), mode='edge') def crop(self, top, bottom, left, right): self.splat", 
"def flipud(self, height): self.splat = np.flipud(self.splat) def rot90(self, width, height):", "= np.pad(self.splat, ((top*self._scale, bottom*self._scale), (left*self._scale, right*self._scale), (0,0)), mode='edge') def crop(self,", "rot90(self, width, height): self.splat = self._rot90_map(self.splat) def rot180(self, width, height):", "self._crop_map_safe(self.splat, top, bottom, left, right, self._scale) def fliplr(self, width): self.splat", "= NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C')) return json def pad(self, top, bottom, left,", "= self._crop_map_safe(self.splat, top, bottom, left, right, self._scale) def fliplr(self, width):", "def fliplr(self, width): self.splat = np.fliplr(self.splat) def flipud(self, height): self.splat", "= terrain_types self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types, order='C') def get_json(self):", "pad(self, top, bottom, left, right): self.splat = np.pad(self.splat, ((top*self._scale, bottom*self._scale),", "right): self.splat = np.pad(self.splat, ((top*self._scale, bottom*self._scale), (left*self._scale, right*self._scale), (0,0)), mode='edge')", "self.splat = self._crop_map_safe(self.splat, top, bottom, left, right, self._scale) def fliplr(self,", "width, height): self.splat = self._rot90_map(self.splat) def rot180(self, width, height): self.splat", "= np.fliplr(self.splat) def flipud(self, height): self.splat = np.flipud(self.splat) def rot90(self,", "NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C')) return json def pad(self, top, bottom, left, right):", "= scale self.terrain_types = terrain_types self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types,", "crop(self, top, bottom, left, right): self.splat = self._crop_map_safe(self.splat, top, bottom,", "left, right): self.splat = self._crop_map_safe(self.splat, 
top, bottom, left, right, self._scale)", "DD.Entity import Entity import numpy as np class Terrain(Entity): def", "= PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types, order='C') def get_json(self): json = self._json", "bottom, left, right, self._scale) def fliplr(self, width): self.splat = np.fliplr(self.splat)", "width, height, scale=4, terrain_types=4): super(Terrain, self).__init__(json) self._scale = scale self.terrain_types", "terrain_types=4): super(Terrain, self).__init__(json) self._scale = scale self.terrain_types = terrain_types self.splat", "def get_json(self): json = self._json json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C')) return", "order='C')) return json def pad(self, top, bottom, left, right): self.splat", "self.splat = np.fliplr(self.splat) def flipud(self, height): self.splat = np.flipud(self.splat) def", "= self._json json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C')) return json def pad(self,", "def crop(self, top, bottom, left, right): self.splat = self._crop_map_safe(self.splat, top,", "self).__init__(json) self._scale = scale self.terrain_types = terrain_types self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale,", "left, right, self._scale) def fliplr(self, width): self.splat = np.fliplr(self.splat) def", "fliplr(self, width): self.splat = np.fliplr(self.splat) def flipud(self, height): self.splat =" ]
[ "self.start = [] self.stop = {} self.descriptor = defaultdict(list) self.event", "__init__(self, msg_hook=None): self.msgs = [] self.msg_hook = msg_hook def __call__(self,", "self.msg_hook = msg_hook def __call__(self, msg): self.msgs.append(msg) if self.msg_hook: self.msg_hook(msg)", "self.msg_hook: self.msg_hook(msg) class DocCollector: def __init__(self): self.start = [] self.stop", "= [] self.stop = {} self.descriptor = defaultdict(list) self.event =", "== 'bulk_events': for k, v in doc.items(): self.event[k].extend(v) else: self.event[doc[\"descriptor\"]].append(doc)", "collections import defaultdict import contextlib import tempfile import sys import", "= sys.stdout try: fout = tempfile.TemporaryFile(mode=\"w+\", encoding=\"utf-8\") sys.stdout = fout", "import contextlib import tempfile import sys import threading import asyncio", "sys.stdout = old_stdout class MsgCollector: def __init__(self, msg_hook=None): self.msgs =", "asyncio.Event() th_ev.set() h = loop.call_soon_threadsafe(really_make_the_event) if not th_ev.wait(0.1): h.cancel() raise", "= defaultdict(list) self.event = {} def insert(self, name, doc): if", "if not th_ev.wait(0.1): h.cancel() raise Exception(\"failed to make asyncio event\")", "{} def insert(self, name, doc): if name == \"start\": self.start.append(doc)", "for k, v in doc.items(): self.event[k].extend(v) else: self.event[doc[\"descriptor\"]].append(doc) def _fabricate_asycio_event(loop):", "'bulk_events': for k, v in doc.items(): self.event[k].extend(v) else: self.event[doc[\"descriptor\"]].append(doc) def", "= threading.Event() aio_event = None def really_make_the_event(): nonlocal aio_event aio_event", "_fabricate_asycio_event(loop): th_ev = threading.Event() aio_event = None def really_make_the_event(): nonlocal", "insert(self, name, doc): if name == \"start\": self.start.append(doc) elif name", "\"start\": self.start.append(doc) elif name == \"stop\": self.stop[doc[\"run_start\"]] = doc elif", "== \"start\": 
self.start.append(doc) elif name == \"stop\": self.stop[doc[\"run_start\"]] = doc", "class DocCollector: def __init__(self): self.start = [] self.stop = {}", "\"descriptor\": self.descriptor[doc[\"run_start\"]].append(doc) self.event[doc[\"uid\"]] = [] elif name == 'bulk_events': for", "from collections import defaultdict import contextlib import tempfile import sys", "self.event[k].extend(v) else: self.event[doc[\"descriptor\"]].append(doc) def _fabricate_asycio_event(loop): th_ev = threading.Event() aio_event =", "= None def really_make_the_event(): nonlocal aio_event aio_event = asyncio.Event() th_ev.set()", "in doc.items(): self.event[k].extend(v) else: self.event[doc[\"descriptor\"]].append(doc) def _fabricate_asycio_event(loop): th_ev = threading.Event()", "self.start.append(doc) elif name == \"stop\": self.stop[doc[\"run_start\"]] = doc elif name", "import defaultdict import contextlib import tempfile import sys import threading", "= [] self.msg_hook = msg_hook def __call__(self, msg): self.msgs.append(msg) if", "threading import asyncio @contextlib.contextmanager def _print_redirect(): old_stdout = sys.stdout try:", "fout yield fout finally: sys.stdout = old_stdout class MsgCollector: def", "tempfile import sys import threading import asyncio @contextlib.contextmanager def _print_redirect():", "really_make_the_event(): nonlocal aio_event aio_event = asyncio.Event() th_ev.set() h = loop.call_soon_threadsafe(really_make_the_event)", "else: self.event[doc[\"descriptor\"]].append(doc) def _fabricate_asycio_event(loop): th_ev = threading.Event() aio_event = None", "def __init__(self): self.start = [] self.stop = {} self.descriptor =", "k, v in doc.items(): self.event[k].extend(v) else: self.event[doc[\"descriptor\"]].append(doc) def _fabricate_asycio_event(loop): th_ev", "def _print_redirect(): old_stdout = sys.stdout try: fout = tempfile.TemporaryFile(mode=\"w+\", encoding=\"utf-8\")", "= fout yield fout finally: sys.stdout = old_stdout class MsgCollector:", 
"name == \"stop\": self.stop[doc[\"run_start\"]] = doc elif name == \"descriptor\":", "asyncio @contextlib.contextmanager def _print_redirect(): old_stdout = sys.stdout try: fout =", "yield fout finally: sys.stdout = old_stdout class MsgCollector: def __init__(self,", "self.event[doc[\"descriptor\"]].append(doc) def _fabricate_asycio_event(loop): th_ev = threading.Event() aio_event = None def", "self.event = {} def insert(self, name, doc): if name ==", "<reponame>AbbyGi/bluesky<filename>bluesky/tests/utils.py<gh_stars>10-100 from collections import defaultdict import contextlib import tempfile import", "fout = tempfile.TemporaryFile(mode=\"w+\", encoding=\"utf-8\") sys.stdout = fout yield fout finally:", "msg_hook def __call__(self, msg): self.msgs.append(msg) if self.msg_hook: self.msg_hook(msg) class DocCollector:", "[] self.msg_hook = msg_hook def __call__(self, msg): self.msgs.append(msg) if self.msg_hook:", "= [] elif name == 'bulk_events': for k, v in", "sys import threading import asyncio @contextlib.contextmanager def _print_redirect(): old_stdout =", "elif name == \"descriptor\": self.descriptor[doc[\"run_start\"]].append(doc) self.event[doc[\"uid\"]] = [] elif name", "th_ev.wait(0.1): h.cancel() raise Exception(\"failed to make asyncio event\") return aio_event", "__call__(self, msg): self.msgs.append(msg) if self.msg_hook: self.msg_hook(msg) class DocCollector: def __init__(self):", "= asyncio.Event() th_ev.set() h = loop.call_soon_threadsafe(really_make_the_event) if not th_ev.wait(0.1): h.cancel()", "h = loop.call_soon_threadsafe(really_make_the_event) if not th_ev.wait(0.1): h.cancel() raise Exception(\"failed to", "import threading import asyncio @contextlib.contextmanager def _print_redirect(): old_stdout = sys.stdout", "loop.call_soon_threadsafe(really_make_the_event) if not th_ev.wait(0.1): h.cancel() raise Exception(\"failed to make asyncio", "try: fout = tempfile.TemporaryFile(mode=\"w+\", encoding=\"utf-8\") sys.stdout = fout yield fout", 
"aio_event aio_event = asyncio.Event() th_ev.set() h = loop.call_soon_threadsafe(really_make_the_event) if not", "if name == \"start\": self.start.append(doc) elif name == \"stop\": self.stop[doc[\"run_start\"]]", "elif name == 'bulk_events': for k, v in doc.items(): self.event[k].extend(v)", "import tempfile import sys import threading import asyncio @contextlib.contextmanager def", "tempfile.TemporaryFile(mode=\"w+\", encoding=\"utf-8\") sys.stdout = fout yield fout finally: sys.stdout =", "[] self.stop = {} self.descriptor = defaultdict(list) self.event = {}", "msg_hook=None): self.msgs = [] self.msg_hook = msg_hook def __call__(self, msg):", "fout finally: sys.stdout = old_stdout class MsgCollector: def __init__(self, msg_hook=None):", "def __call__(self, msg): self.msgs.append(msg) if self.msg_hook: self.msg_hook(msg) class DocCollector: def", "th_ev = threading.Event() aio_event = None def really_make_the_event(): nonlocal aio_event", "name, doc): if name == \"start\": self.start.append(doc) elif name ==", "== \"descriptor\": self.descriptor[doc[\"run_start\"]].append(doc) self.event[doc[\"uid\"]] = [] elif name == 'bulk_events':", "_print_redirect(): old_stdout = sys.stdout try: fout = tempfile.TemporaryFile(mode=\"w+\", encoding=\"utf-8\") sys.stdout", "== \"stop\": self.stop[doc[\"run_start\"]] = doc elif name == \"descriptor\": self.descriptor[doc[\"run_start\"]].append(doc)", "doc.items(): self.event[k].extend(v) else: self.event[doc[\"descriptor\"]].append(doc) def _fabricate_asycio_event(loop): th_ev = threading.Event() aio_event", "self.descriptor = defaultdict(list) self.event = {} def insert(self, name, doc):", "defaultdict import contextlib import tempfile import sys import threading import", "name == 'bulk_events': for k, v in doc.items(): self.event[k].extend(v) else:", "= tempfile.TemporaryFile(mode=\"w+\", encoding=\"utf-8\") sys.stdout = fout yield fout finally: sys.stdout", "class MsgCollector: def __init__(self, msg_hook=None): self.msgs = 
[] self.msg_hook =", "sys.stdout try: fout = tempfile.TemporaryFile(mode=\"w+\", encoding=\"utf-8\") sys.stdout = fout yield", "doc): if name == \"start\": self.start.append(doc) elif name == \"stop\":", "aio_event = asyncio.Event() th_ev.set() h = loop.call_soon_threadsafe(really_make_the_event) if not th_ev.wait(0.1):", "aio_event = None def really_make_the_event(): nonlocal aio_event aio_event = asyncio.Event()", "self.stop = {} self.descriptor = defaultdict(list) self.event = {} def", "def __init__(self, msg_hook=None): self.msgs = [] self.msg_hook = msg_hook def", "= {} def insert(self, name, doc): if name == \"start\":", "th_ev.set() h = loop.call_soon_threadsafe(really_make_the_event) if not th_ev.wait(0.1): h.cancel() raise Exception(\"failed", "elif name == \"stop\": self.stop[doc[\"run_start\"]] = doc elif name ==", "= doc elif name == \"descriptor\": self.descriptor[doc[\"run_start\"]].append(doc) self.event[doc[\"uid\"]] = []", "if self.msg_hook: self.msg_hook(msg) class DocCollector: def __init__(self): self.start = []", "= {} self.descriptor = defaultdict(list) self.event = {} def insert(self,", "= msg_hook def __call__(self, msg): self.msgs.append(msg) if self.msg_hook: self.msg_hook(msg) class", "self.descriptor[doc[\"run_start\"]].append(doc) self.event[doc[\"uid\"]] = [] elif name == 'bulk_events': for k,", "[] elif name == 'bulk_events': for k, v in doc.items():", "sys.stdout = fout yield fout finally: sys.stdout = old_stdout class", "self.msgs = [] self.msg_hook = msg_hook def __call__(self, msg): self.msgs.append(msg)", "self.event[doc[\"uid\"]] = [] elif name == 'bulk_events': for k, v", "old_stdout class MsgCollector: def __init__(self, msg_hook=None): self.msgs = [] self.msg_hook", "name == \"descriptor\": self.descriptor[doc[\"run_start\"]].append(doc) self.event[doc[\"uid\"]] = [] elif name ==", "MsgCollector: def __init__(self, msg_hook=None): self.msgs = [] self.msg_hook = msg_hook", "self.msgs.append(msg) if self.msg_hook: 
self.msg_hook(msg) class DocCollector: def __init__(self): self.start =", "old_stdout = sys.stdout try: fout = tempfile.TemporaryFile(mode=\"w+\", encoding=\"utf-8\") sys.stdout =", "None def really_make_the_event(): nonlocal aio_event aio_event = asyncio.Event() th_ev.set() h", "name == \"start\": self.start.append(doc) elif name == \"stop\": self.stop[doc[\"run_start\"]] =", "doc elif name == \"descriptor\": self.descriptor[doc[\"run_start\"]].append(doc) self.event[doc[\"uid\"]] = [] elif", "{} self.descriptor = defaultdict(list) self.event = {} def insert(self, name,", "nonlocal aio_event aio_event = asyncio.Event() th_ev.set() h = loop.call_soon_threadsafe(really_make_the_event) if", "import asyncio @contextlib.contextmanager def _print_redirect(): old_stdout = sys.stdout try: fout", "DocCollector: def __init__(self): self.start = [] self.stop = {} self.descriptor", "contextlib import tempfile import sys import threading import asyncio @contextlib.contextmanager", "def insert(self, name, doc): if name == \"start\": self.start.append(doc) elif", "\"stop\": self.stop[doc[\"run_start\"]] = doc elif name == \"descriptor\": self.descriptor[doc[\"run_start\"]].append(doc) self.event[doc[\"uid\"]]", "encoding=\"utf-8\") sys.stdout = fout yield fout finally: sys.stdout = old_stdout", "self.msg_hook(msg) class DocCollector: def __init__(self): self.start = [] self.stop =", "= loop.call_soon_threadsafe(really_make_the_event) if not th_ev.wait(0.1): h.cancel() raise Exception(\"failed to make", "msg): self.msgs.append(msg) if self.msg_hook: self.msg_hook(msg) class DocCollector: def __init__(self): self.start", "= old_stdout class MsgCollector: def __init__(self, msg_hook=None): self.msgs = []", "@contextlib.contextmanager def _print_redirect(): old_stdout = sys.stdout try: fout = tempfile.TemporaryFile(mode=\"w+\",", "defaultdict(list) self.event = {} def insert(self, name, doc): if name", "import sys import threading import asyncio @contextlib.contextmanager def 
_print_redirect(): old_stdout", "threading.Event() aio_event = None def really_make_the_event(): nonlocal aio_event aio_event =", "self.stop[doc[\"run_start\"]] = doc elif name == \"descriptor\": self.descriptor[doc[\"run_start\"]].append(doc) self.event[doc[\"uid\"]] =", "def really_make_the_event(): nonlocal aio_event aio_event = asyncio.Event() th_ev.set() h =", "not th_ev.wait(0.1): h.cancel() raise Exception(\"failed to make asyncio event\") return", "v in doc.items(): self.event[k].extend(v) else: self.event[doc[\"descriptor\"]].append(doc) def _fabricate_asycio_event(loop): th_ev =", "def _fabricate_asycio_event(loop): th_ev = threading.Event() aio_event = None def really_make_the_event():", "finally: sys.stdout = old_stdout class MsgCollector: def __init__(self, msg_hook=None): self.msgs", "__init__(self): self.start = [] self.stop = {} self.descriptor = defaultdict(list)" ]
[ "try: Checker( json.load(data), mode=mode, ).run_check() except CheckException as e: print(f\"Check", "if not files: print(\"No files specified.\") return 1 possible_modes =", "print(f\"Check for {f} successful.\") return 1 if failed else 0", "x not in possible_modes] failed = False for f in", "return 1 possible_modes = tuple(f\"--{mode}\" for mode in Checker.modes) modes", "for f in files: with open(f) as data: try: Checker(", "{f} successful.\") return 1 if failed else 0 if __name__", "exit(1) else: mode = modes[0] if len(modes): files = [x", "specified.\") return 1 possible_modes = tuple(f\"--{mode}\" for mode in Checker.modes)", "CheckException def main() -> int: files = sys.argv[1:] if not", "else: print(f\"Check for {f} successful.\") return 1 if failed else", "-> int: files = sys.argv[1:] if not files: print(\"No files", "elif len(modes) > 1: print(f\"You can only choose one mode", "files: print(\"No files specified.\") return 1 possible_modes = tuple(f\"--{mode}\" for", "f in files: with open(f) as data: try: Checker( json.load(data),", "= \"all\" elif len(modes) > 1: print(f\"You can only choose", "1 if failed else 0 if __name__ == \"__main__\": sys.exit(main())", "files if x not in possible_modes] failed = False for", "True else: print(f\"Check for {f} successful.\") return 1 if failed", "as data: try: Checker( json.load(data), mode=mode, ).run_check() except CheckException as", "if len(modes) == 0: mode = \"all\" elif len(modes) >", "def main() -> int: files = sys.argv[1:] if not files:", "mode = modes[0] if len(modes): files = [x for x", "modes[0] if len(modes): files = [x for x in files", "1: print(f\"You can only choose one mode of {', '.join(possible_modes)}.\")", "sys from openslides_backend.models.checker import Checker, CheckException def main() -> int:", "= False for f in files: with open(f) as data:", "in Checker.modes) modes = tuple(mode[2:] for mode in possible_modes if", "sys.argv[1:] if not files: print(\"No files specified.\") return 1 
possible_modes", "= tuple(f\"--{mode}\" for mode in Checker.modes) modes = tuple(mode[2:] for", "files = sys.argv[1:] if not files: print(\"No files specified.\") return", "else: mode = modes[0] if len(modes): files = [x for", "if len(modes): files = [x for x in files if", "choose one mode of {', '.join(possible_modes)}.\") exit(1) else: mode =", "import Checker, CheckException def main() -> int: files = sys.argv[1:]", "except CheckException as e: print(f\"Check for {f} failed:\\n\", e) failed", "data: try: Checker( json.load(data), mode=mode, ).run_check() except CheckException as e:", "mode in possible_modes if mode in files) if len(modes) ==", "possible_modes] failed = False for f in files: with open(f)", "mode=mode, ).run_check() except CheckException as e: print(f\"Check for {f} failed:\\n\",", "for x in files if x not in possible_modes] failed", "CheckException as e: print(f\"Check for {f} failed:\\n\", e) failed =", "mode of {', '.join(possible_modes)}.\") exit(1) else: mode = modes[0] if", "successful.\") return 1 if failed else 0 if __name__ ==", "0: mode = \"all\" elif len(modes) > 1: print(f\"You can", "in files) if len(modes) == 0: mode = \"all\" elif", "openslides_backend.models.checker import Checker, CheckException def main() -> int: files =", "with open(f) as data: try: Checker( json.load(data), mode=mode, ).run_check() except", "print(f\"Check for {f} failed:\\n\", e) failed = True else: print(f\"Check", "for {f} successful.\") return 1 if failed else 0 if", "import sys from openslides_backend.models.checker import Checker, CheckException def main() ->", "False for f in files: with open(f) as data: try:", "len(modes) > 1: print(f\"You can only choose one mode of", "of {', '.join(possible_modes)}.\") exit(1) else: mode = modes[0] if len(modes):", "not in possible_modes] failed = False for f in files:", "in possible_modes] failed = False for f in files: with", "possible_modes if mode in files) if len(modes) == 0: mode", "as e: print(f\"Check for {f} 
failed:\\n\", e) failed = True", "\"all\" elif len(modes) > 1: print(f\"You can only choose one", "files = [x for x in files if x not", "possible_modes = tuple(f\"--{mode}\" for mode in Checker.modes) modes = tuple(mode[2:]", "for mode in possible_modes if mode in files) if len(modes)", "in files: with open(f) as data: try: Checker( json.load(data), mode=mode,", "import json import sys from openslides_backend.models.checker import Checker, CheckException def", "files specified.\") return 1 possible_modes = tuple(f\"--{mode}\" for mode in", "x in files if x not in possible_modes] failed =", "1 possible_modes = tuple(f\"--{mode}\" for mode in Checker.modes) modes =", "'.join(possible_modes)}.\") exit(1) else: mode = modes[0] if len(modes): files =", "mode in Checker.modes) modes = tuple(mode[2:] for mode in possible_modes", "int: files = sys.argv[1:] if not files: print(\"No files specified.\")", "= [x for x in files if x not in", "not files: print(\"No files specified.\") return 1 possible_modes = tuple(f\"--{mode}\"", "e) failed = True else: print(f\"Check for {f} successful.\") return", "one mode of {', '.join(possible_modes)}.\") exit(1) else: mode = modes[0]", "from openslides_backend.models.checker import Checker, CheckException def main() -> int: files", "files: with open(f) as data: try: Checker( json.load(data), mode=mode, ).run_check()", "= modes[0] if len(modes): files = [x for x in", "files) if len(modes) == 0: mode = \"all\" elif len(modes)", "e: print(f\"Check for {f} failed:\\n\", e) failed = True else:", "print(\"No files specified.\") return 1 possible_modes = tuple(f\"--{mode}\" for mode", "tuple(f\"--{mode}\" for mode in Checker.modes) modes = tuple(mode[2:] for mode", "mode in files) if len(modes) == 0: mode = \"all\"", "len(modes) == 0: mode = \"all\" elif len(modes) > 1:", "> 1: print(f\"You can only choose one mode of {',", "main() -> int: files = sys.argv[1:] if not files: print(\"No", "json.load(data), mode=mode, ).run_check() except 
CheckException as e: print(f\"Check for {f}", "Checker.modes) modes = tuple(mode[2:] for mode in possible_modes if mode", "return 1 if failed else 0 if __name__ == \"__main__\":", "for mode in Checker.modes) modes = tuple(mode[2:] for mode in", "in possible_modes if mode in files) if len(modes) == 0:", "= True else: print(f\"Check for {f} successful.\") return 1 if", "can only choose one mode of {', '.join(possible_modes)}.\") exit(1) else:", "modes = tuple(mode[2:] for mode in possible_modes if mode in", "for {f} failed:\\n\", e) failed = True else: print(f\"Check for", "failed = True else: print(f\"Check for {f} successful.\") return 1", "in files if x not in possible_modes] failed = False", "== 0: mode = \"all\" elif len(modes) > 1: print(f\"You", "open(f) as data: try: Checker( json.load(data), mode=mode, ).run_check() except CheckException", "only choose one mode of {', '.join(possible_modes)}.\") exit(1) else: mode", "print(f\"You can only choose one mode of {', '.join(possible_modes)}.\") exit(1)", ").run_check() except CheckException as e: print(f\"Check for {f} failed:\\n\", e)", "if x not in possible_modes] failed = False for f", "len(modes): files = [x for x in files if x", "if mode in files) if len(modes) == 0: mode =", "Checker( json.load(data), mode=mode, ).run_check() except CheckException as e: print(f\"Check for", "mode = \"all\" elif len(modes) > 1: print(f\"You can only", "{f} failed:\\n\", e) failed = True else: print(f\"Check for {f}", "json import sys from openslides_backend.models.checker import Checker, CheckException def main()", "tuple(mode[2:] for mode in possible_modes if mode in files) if", "failed = False for f in files: with open(f) as", "failed:\\n\", e) failed = True else: print(f\"Check for {f} successful.\")", "[x for x in files if x not in possible_modes]", "= tuple(mode[2:] for mode in possible_modes if mode in files)", "= sys.argv[1:] if not files: print(\"No files specified.\") return 1", "Checker, CheckException def main() -> 
int: files = sys.argv[1:] if", "{', '.join(possible_modes)}.\") exit(1) else: mode = modes[0] if len(modes): files" ]
[ "import load_memfile_configs from utils.server import plain_response from sanic import response", "import plain_response from sanic import response def get_mappedfile_configs(): cfgs =", "sanic import response def get_mappedfile_configs(): cfgs = load_memfile_configs() return response.json(plain_response(cfgs,", "load_memfile_configs from utils.server import plain_response from sanic import response def", "from sanic import response def get_mappedfile_configs(): cfgs = load_memfile_configs() return", "from utils.server import plain_response from sanic import response def get_mappedfile_configs():", "import response def get_mappedfile_configs(): cfgs = load_memfile_configs() return response.json(plain_response(cfgs, 0),", "plain_response from sanic import response def get_mappedfile_configs(): cfgs = load_memfile_configs()", "utils.server import plain_response from sanic import response def get_mappedfile_configs(): cfgs", "utils.data import load_memfile_configs from utils.server import plain_response from sanic import", "get_mappedfile_configs(): cfgs = load_memfile_configs() return response.json(plain_response(cfgs, 0), status=200) def created_mapped_file():", "= load_memfile_configs() return response.json(plain_response(cfgs, 0), status=200) def created_mapped_file(): pass def", "return response.json(plain_response(cfgs, 0), status=200) def created_mapped_file(): pass def delete_mapped_file(): pass", "cfgs = load_memfile_configs() return response.json(plain_response(cfgs, 0), status=200) def created_mapped_file(): pass", "def get_mappedfile_configs(): cfgs = load_memfile_configs() return response.json(plain_response(cfgs, 0), status=200) def", "response def get_mappedfile_configs(): cfgs = load_memfile_configs() return response.json(plain_response(cfgs, 0), status=200)", "from utils.data import load_memfile_configs from utils.server import plain_response from sanic", "load_memfile_configs() return response.json(plain_response(cfgs, 0), status=200) def 
created_mapped_file(): pass def delete_mapped_file():", "<filename>utils/mgmt.py from utils.data import load_memfile_configs from utils.server import plain_response from" ]
[ "num_lesion_slices, np.shape(mask_img)[-1] if __name__ == '__main__': # for phasename in", "for phasename in ['NC', 'ART', 'PV']: # convert_dicomseries2mhd( # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/'", "int(np.mean(x)) centroid_y = int(np.mean(y)) centroid_z = int(np.mean(z)) return centroid_x, centroid_y,", "image, ROI_Image gc.collect() def compress22dim(image): ''' 将一个矩阵如果可能,压缩到三维的空间 ''' shape =", "res_dict = {} res_dict['voxel_spacing'] = [header_obj['srow_x'][0], header_obj['srow_y'][1], header_obj['srow_z'][2]] img_arr =", "def save_mhd_image(image, file_name): header = itk.GetImageFromArray(image) itk.WriteImage(header, file_name) # 根据文件名返回期项名", "image_arr image_arr_rgb[:, :, 1] = image_arr image_arr_rgb[:, :, 2] =", ":] = 0 return np.concatenate([ np.expand_dims(kernel_whole, axis=0), np.expand_dims(kernel_left, axis=0), np.expand_dims(kernel_right,", "np.min(ys) max_ys = np.max(ys) weakly_label_mask[min_xs: max_xs, min_ys: max_ys] = 1", "return after_zoom def preprocessing_agumentation(image, size_training): image = np.array(image) # numpy_clip", "# print np.max(binary_seg_slice) masks.append(binary_seg_slice) labeled_mask = label(binary_seg_slice) weakly_label_mask = np.zeros_like(binary_seg_slice,", "multi phase # image_dir = '/home/give/Documents/dataset/LiverLesionDetection_Splited/0' # static_pixel_num(image_dir, 'PV') statics_num_slices_lesion('/media/give/CBMIR/ld/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2')", "return read_nil(file_path) if file_path.endswith('.mhd'): return read_mhd_image(file_path) print('the format of image", "min_ys = np.min(ys) max_ys = np.max(ys) weakly_label_mask[min_xs: max_xs, min_ys: max_ys]", "os from glob import glob import scipy import cv2 from", "image -= minval interv = maxval - minval # print('static", "channel: # z = channel - 1 # next_slice.append(volume[:, :,", "dtype=np.int) zooms = desired_size / np.array(image[:, :, 0].shape, 
dtype=np.float) print(zooms)", "1 => biliniear interpolation return after_zoom def preprocessing_agumentation(image, size_training): image", "= desired_size / np.array(image[:, :, 0].shape, dtype=np.float) print(zooms) after_zoom =", "= os.path.join(xml_save_dir, file_name + '.xml') gt_save_path = os.path.join(evulate_gt_dir, file_name +", "return new_image def read_image_file(file_path): if file_path.endswith('.nii'): return read_nil(file_path) if file_path.endswith('.mhd'):", "= doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node = doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node", "== 2, np.uint8) # print np.max(binary_seg_slice) masks.append(binary_seg_slice) labeled_mask = label(binary_seg_slice)", "suffix_name='npy'): target_mask = None mhd_images = [] for phase_name in", "sub_name)) for name in names: cur_slice_dir = os.path.join(image_dir, sub_name, name)", "= np.sum(has_lesion) print os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1] if __name__ == '__main__':", "# def test_convert2depthfirst(): # zeros = np.zeros([100, 100, 30]) #", "max_v - min_v volume = (volume - min_v) / interv", "def resize_image(image, size): image = Image.fromarray(np.asarray(image, np.uint8)) return image.resize((size, size))", "np.max(xs) ROI = image_arr_rgb[miny - 1:maxy + 1, minx -", "extract_bboxs_mask_from_mask from config import pixel2type, type2pixel for sub_name in ['train',", "import disk, dilation import nipy import os from glob import", "os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image),", "scale down to 0 - 2 # image /= (interv", "pydicom import numpy as np from PIL import Image, ImageDraw", "idx ''' sum_res = np.sum(np.sum(mask_image, axis=1), axis=1) return np.argmax(sum_res) #", "1 
pos_slice_num = np.sum(np.sum(np.sum(seg == 2, axis=0), axis=0) != 0)", "image_draw = ImageDraw.Draw(image) [ys, xs] = np.where(mask_image != 0) miny", "phasename in phasenames: if file_name.find(phasename) != -1: return phasename #", "= read_nii_with_header(volume_path) # volume = np.transpose(volume, [1, 0, 2]) volume", "# print(np.shape(image)) # conver2JPG single phase # image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0'", "if phase_name == target_phase: target_mask = mask_image print(np.shape(mhd_images)) mask_image =", "slice_num = int(z_axis / z_axis_case) if slice_num == 0: slice_num", "1, 2, 3, 4] def get_voxel_size(file_path): load_image_obj = nipy.load_image(file_path) header", "os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image))", "line = '%s %d %d %d %d\\n' % ('Cyst', min_y,", "= np.max(ys) minx = np.min(xs) maxx = np.max(xs) ROI =", "= max_v - min_v volume = (volume - min_v) /", "-200. # max_v = 180 # min_v = -70 volume[volume", "test_convert2depthfirst() ''' 将[d, w, h]reshape为[w, h, d] ''' def convert2depthlastest(image):", "depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image != 1] = 0 xs,", "= np.max(ys) weakly_label_mask[min_xs: max_xs, min_ys: max_ys] = 1 liver_masks.append(np.asarray(seg_slice ==", "= os.path.basename(os.path.dirname(slice_dir)) phase_name = ''.join(phasenames) save_path = os.path.join(save_dir, phase_name, dataset_name,", "folder_node = doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node = doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name =", "mask_image = np.asarray(np.squeeze(mask_image), np.uint8) max_v = 300. 
min_v = -350.", "if len(shape) == 3: return np.squeeze(image) return image def extract_ROI(image,", "return_phasename(file_name): phasenames = ['NC', 'ART', 'PV'] for phasename in phasenames:", "min_v = np.min(volume) max_v = np.max(volume) interv = max_v -", "scale down to 0 - 2 image /= (interv /", "value in list(pixel_value_set): static_res[value] += np.sum(mask == value) print(static_res) def", "plt plt.figure(\"Image\") # 这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img, cmap='gray') plt.axis('on') plt.title('image')", "# save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' # phase_name = 'NC' # MICCAI2018_Iterator(image_dir,", "0, 1: 0, 2: 0, 3: 0, 4: 0, 5:", "idname_dict = return_type_idname() return idname_dict[typeid] # 根据病灶类型的name返回id的字符串 def return_typeid_byname(typename): nameid_dict", "= np.asarray(seg_slice == 2, np.uint8) # print np.max(binary_seg_slice) masks.append(binary_seg_slice) labeled_mask", "'ART', 'PV'] for phasename in phasenames: if file_name.find(phasename) != -1:", "z_axis=5.0, short_edge=64): ''' 将nii转化成PNG :param volume_path: nii的路径 :param seg_path: :return:", "% phase_name))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image =", "for i in range(channel): seg_slice = seg[:, :, i] mid_slice", "slice_num / 2 # pre_end = i # for j", "np.uint8) for idx in range(1, np.max(labeled_mask) + 1): xs, ys", "f: f.write(doc.toprettyxml(indent='\\t', encoding='utf-8')) line = '%s %d %d %d %d\\n'", "255. 
image = np.clip(image, c_minimum, c_maximum) interv = float(c_maximum -", "= 180 image = image + 70 return np.array(image) #", "== 'jpg': mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image", "# pre_end = i # for j in range(1, slice_num", "'%s %d %d %d %d\\n' % ('Cyst', min_y, min_x, max_y,", "% target_phase))[0] mask_image = read_mhd_image(mhd_mask_path) min_xs, min_ys, max_xs, max_ys, names,", "# image.show() from scipy import ndimage image = ndimage.binary_fill_holes(image).astype(np.uint8) return", "0, 5: 0 } from convert2jpg import extract_bboxs_mask_from_mask from config", "= int(z_axis / z_axis_case) if slice_num == 0: slice_num =", "ImageDraw.Draw(image) [ys, xs] = np.where(mask_image != 0) miny = np.min(ys)", "= np.sum(np.sum(np.sum(seg == 2, axis=0), axis=0) != 0) total_slice_num =", "__name__ == '__main__': # for phasename in ['NC', 'ART', 'PV']:", "max_y = np.max(ys) object_node = doc.createElement('object') root_node.appendChild(object_node) name_node = doc.createElement('name')", "= (volume - min_v) / interv z_axis_case = header['voxel_spacing'][-1] slice_num", "# order = 1 => biliniear interpolation return after_zoom def", "lines = [] lines.append(line) with open(gt_save_path, 'w') as f: f.writelines(lines)", "evulate doc = Document() root_node = doc.createElement('annotation') doc.appendChild(root_node) folder_name =", "list(np.shape(mask_image)) if len(mask_image_shape) == 3: mask_image = mask_image[1, :, :]", "* pos_slice_num) / total_slice_num # 正样本是负样本的 if neg_rate > 1.0:", "np.min(xs) maxx = np.max(xs) ROI = image_arr_rgb[miny - 1:maxy +", "imgs = [] names = [] masks = [] tumor_weakly_masks", "miny = np.min(ys) maxy = np.max(ys) minx = np.min(xs) maxx", "max_v - min_v mhd_image = (mhd_image - min_v) / interv", "= 'FNH' res[2] = 'HCC' res[3] = 'HEM' res[4] =", "= pydicom.read_file(file_name) image = header.pixel_array image = header.RescaleSlope * image", "cv2 import numpy as np kernel = np.ones((kernel_size, 
kernel_size), np.uint8)", "= -300. top = 500. image = np.clip(image, bottom, top)", "tumor_weakly_masks, np.uint8) def statics_num_slices_lesion(nii_dir): ''' 统计每个case,有多少slice具有病灶 :param nii_dir: :return: '''", "nipy import os from glob import glob import scipy import", "z]) if (i - 1) < 0: pre_slice = np.expand_dims(volume[:,", "image # 读取mhd文件 def read_mhd_image(file_path, rejust=False): header = itk.ReadImage(file_path) image", "names: # path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name) # image = read_nil(path)", "def image_expand(mask_image, r): # return dilation(mask_image, disk(r)) ''' 将形式如(512, 512)格式的图像转化为(1,", "return close_r def open_operation(slice_image, kernel_size=3): opening = cv2.morphologyEx(slice_image, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE,", "single phase # image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' # save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase'", "shape[1]]) for i in range(shape[2]): new_image[i, :, :] = image[:,", "= cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel) return close_r def open_operation(slice_image, kernel_size=3): opening", "i] = scipy.ndimage.zoom(image[:, :, i], zooms, order=1) # order =", "= max_v mhd_image[mhd_image < min_v] = min_v print(np.mean(mhd_image, dtype=np.float32)) mhd_image", "return nameid_dict[typename] # 填充图像 def fill_region(image): # image.show() from scipy", "size_training): image = np.array(image) # numpy_clip bottom = -300. 
top", "short_edge=64): ''' 将nii转化成PNG :param volume_path: nii的路径 :param seg_path: :return: '''", "down to 0 - 2 image /= (interv / 2)", "= doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node = doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node", "kernel_size)) image = cv2.erode(img, kernel) return image # 图像膨胀 #", "= [] masks = [] tumor_weakly_masks = [] liver_masks =", "sub_name, name) execute_func(cur_slice_dir, *parameters) def dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'): mhd_image_path =", "mask_img = read_nii(mask_nii_path) has_lesion = np.asarray(np.sum(np.sum(mask_img == 2, axis=0), axis=0)>0,", "phasename, # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename + '.mhd' # ) #", "图像膨胀 # def image_expand(image, size): # def find_significant_layer(mask_image): ''' 找到显著层", "kernel_left[:, half_size + 1:] = 0 kernel_right = np.copy(kernel_whole) kernel_right[:,", "'ART', 'PV']: # convert_dicomseries2mhd( # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename, # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/'", "180] = 180 image_arr = image_arr + 70 shape =", "'/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' # phase_name = 'NC' # MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name)", "evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt') if not os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir) xml_save_path", "< 0: pre_slice = np.expand_dims(volume[:, :, i], axis=0) else: pre_slice", "2, 0]) mhd_image = mhd_images elif suffix_name == 'npy': mhd_images", "c_maximum = 500. s_maximum = 255. 
image = np.clip(image, c_minimum,", "return_type_nameid(): res = {} res['CYST'] = 0 res['FNH'] = 1", "# return dilation(mask_image, disk(r)) ''' 将形式如(512, 512)格式的图像转化为(1, 512, 512)形式的图片 '''", "import cv2 import numpy as np kernel = np.ones((kernel_size, kernel_size),", "= np.array(image) # numpy_clip bottom = -300. top = 500.", "get_kernel_filters(kernel_size): ''' 返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param kernel_size: :return: [5, kernel_size, kernel_size]", "= 'HEM' res[4] = 'METS' return res # 根据病灶类型的ID返回类型的字符串 def", "total_slice_num) neg_rate = (3.0 * pos_slice_num) / total_slice_num # 正样本是负样本的", "kernel_size: :return: [5, kernel_size, kernel_size] ''' kernel_whole = np.ones([kernel_size, kernel_size],", "del image, ROI_Image gc.collect() def compress22dim(image): ''' 将一个矩阵如果可能,压缩到三维的空间 ''' shape", "100, 30]) # after_zeros = convert2depthfirst(zeros) # print np.shape(after_zeros) #", "180] = 180 image = image + 70 return np.array(image)", "'wb') as f: f.write(doc.toprettyxml(indent='\\t', encoding='utf-8')) line = '%s %d %d", "mask_image[mask_image != 1] = 0 xs, ys = np.where(mask_image ==", "shape[1], 3]) image_arr_rgb[:, :, 0] = image_arr image_arr_rgb[:, :, 1]", "217784361, 1: 1392043, 2: 209128, 3: 1486676, 4: 458278, 5:", "in ['NC', 'ART', 'PV']: # convert_dicomseries2mhd( # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename,", "# path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name) # image = read_nil(path) #", "name) mhd_mask_path = glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' % target_phase))[0] mask_image = read_mhd_image(mhd_mask_path)", "phase_name = 'NC' # MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name) # conver2JPG", "name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node = doc.createElement('truncated') 
object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node = doc.createElement('difficult')", "axis=0) # pre_slice = np.mean(pre_slice, axis=0, keepdims=True) # next_slice =", "# image = read_nil(path) # print(np.shape(image)) # conver2JPG single phase", "kernel_whole = np.ones([kernel_size, kernel_size], np.uint8) half_size = kernel_size // 2", "file_path.endswith('.nii'): return read_nil(file_path) if file_path.endswith('.mhd'): return read_mhd_image(file_path) print('the format of", "return new_image # def test_convert2depthfirst(): # zeros = np.zeros([100, 100,", "= Document() root_node = doc.createElement('annotation') doc.appendChild(root_node) folder_name = os.path.basename(save_dir) +", "3: 0, 4: 0, 5: 0 } from convert2jpg import", "glob import scipy import cv2 from xml.dom.minidom import Document typenames", "= next_start + j # if z >= channel: #", "'_Mask*.mhd'))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image),", "header = read_nii_with_header(volume_path) # volume = np.transpose(volume, [1, 0, 2])", "= 4 return res # 返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename def return_type_idname():", "np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0), axes=[1, 2, 0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-'", "= i # for j in range(1, slice_num + 1):", "np.asarray(seg_slice == 2, np.uint8) # print np.max(binary_seg_slice) masks.append(binary_seg_slice) labeled_mask =", "depth_node = doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image != 1]", "seg[:, :, i] mid_slice = np.expand_dims(volume[:, :, i], axis=0) pre_slice", "= 180 # min_v = -70 volume[volume > max_v] =", "= Image.fromarray(np.asarray(ROI, np.uint8)) for index, y in 
enumerate(ys): image_draw.point([xs[index], y],", "'w') as f: f.writelines(lines) f.close() def dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC', 'ART',", "axis=0) mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image =", "[] lines.append(line) with open(gt_save_path, 'w') as f: f.writelines(lines) f.close() def", "ROI = image_arr_rgb[miny - 1:maxy + 1, minx - 1:maxx", "# image /= (interv / 2) image = np.asarray(image, np.float32)", "MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name) # conver2JPG multi phase # image_dir", "fill=(255, 0, 0)) if save_path is None: image.show() else: image.save(save_path)", "= 255. image -= minval interv = maxval - minval", "import Document typenames = ['CYST', 'FNH', 'HCC', 'HEM', 'METS'] typeids", "def show_image(image): img = np.asarray(image, np.uint8) import matplotlib.pyplot as plt", "= 0 xs, ys = np.where(mask_image == 1) print(xs, ys)", "typeids = [0, 1, 2, 3, 4] def get_voxel_size(file_path): load_image_obj", "size_node.appendChild(depth_node) mask_image[mask_image != 1] = 0 xs, ys = np.where(mask_image", "open(gt_save_path, 'w') as f: f.writelines(lines) f.close() def static_pixel_num(image_dir, target_phase='PV'): #", "matplotlib.pyplot as plt plt.figure(\"Image\") # 这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img, cmap='gray')", "return image # 读取mhd文件 def read_mhd_image(file_path, rejust=False): header = itk.ReadImage(file_path)", "i in range(shape[0]): new_image[:, :, i] = image[i, :, :]", "mask_image = mask_image[1, :, :] print('the mask image shape is", "list(np.shape(mhd_image)) size_node = doc.createElement('size') root_node.appendChild(size_node) width_node = doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node", "pre_slice = [] # pre_end = i - slice_num /", "return ds.PatientID # 返回病灶类型和ID的字典类型的数据 key是typename value是typeid def return_type_nameid(): res =", "= 
np.mean(pre_slice, axis=0, keepdims=True) # next_slice = np.mean(next_slice, axis=0, keepdims=True)", "imgs.append( np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0), axes=[1, 2, 0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] +", "0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' + str(i)) binary_seg_slice = np.asarray(seg_slice ==", "keepdims=True) # next_slice = np.mean(next_slice, axis=0, keepdims=True) imgs.append( np.transpose(np.concatenate([pre_slice, mid_slice,", "phasename # 读取DICOM文件中包含的病例ID信息 def read_patientId(dicom_file_path): ds = pydicom.read_file(dicom_file_path) return ds.PatientID", "= int(np.mean(x)) centroid_y = int(np.mean(y)) centroid_z = int(np.mean(z)) return centroid_x,", "object_node = doc.createElement('object') root_node.appendChild(object_node) name_node = doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node", "SimpleITK as itk import pydicom import numpy as np from", "rejust=False): header = itk.ReadImage(file_path) image = np.array(itk.GetArrayFromImage(header)) if rejust: image[image", "= read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_image = np.expand_dims(mhd_image, axis=2)", "doc = Document() root_node = doc.createElement('annotation') doc.appendChild(root_node) folder_name = os.path.basename(save_dir)", "== 2, axis=0), axis=0) != 0) total_slice_num = np.shape(seg)[-1] print('pos_slice_num", "glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0] mhd_mask_path = glob(os.path.join(slice_dir, phase_name + '_Mask*.mhd'))[0] mhd_image =", "not support') assert False max_v = 300. 
min_v = -350.", "image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' # save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' # phase_name =", "= 3 res['METS'] = 4 return res # 返回病灶类型ID和名称的字典类型的数据 key是typeid", "= np.min(xs) min_y = np.min(ys) max_x = np.max(xs) max_y =", "max_v = 180 # min_v = -70 volume[volume > max_v]", "xml_save_path = os.path.join(xml_save_dir, file_name + '.xml') gt_save_path = os.path.join(evulate_gt_dir, file_name", "doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node = doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name = file_name +", "save_path): data = read_dicom_series(dicom_series_dir) save_mhd_image(data, save_path) # 读取单个DICOM文件 def read_dicom_file(file_name):", ":] print('the mask image shape is ', np.shape(mask_image)) if suffix_name", "= np.asarray(np.squeeze(mhd_image), np.float32) mhd_image = np.expand_dims(mhd_image, axis=2) mhd_image = np.concatenate([mhd_image,", "np.asarray(image, np.uint8) import matplotlib.pyplot as plt plt.figure(\"Image\") # 这里必须加 cmap='gray'", "-70] = -70 image[image > 180] = 180 image =", "= cv2.dilate(img, kernel) return image def image_erode(img, kernel_size=5): kernel =", "gt_save_path = os.path.join(evulate_gt_dir, file_name + '.txt') # for evulate doc", "1) < 0: pre_slice = np.expand_dims(volume[:, :, i], axis=0) else:", "2, 3, 4] def get_voxel_size(file_path): load_image_obj = nipy.load_image(file_path) header =", "1] def resize_image(image, size): image = Image.fromarray(np.asarray(image, np.uint8)) return image.resize((size,", "= [size_training, size_training] desired_size = np.asarray(desired_size, dtype=np.int) zooms = desired_size", "np.mean(pre_slice, axis=0, keepdims=True) # next_slice = np.mean(next_slice, axis=0, keepdims=True) imgs.append(", "kernel_right[:, :half_size] = 0 kernel_top = np.copy(kernel_whole) kernel_top[half_size + 1:,", "np.float32), axes=[1, 2, 0]) mhd_image = 
mhd_images else: print('the suffix", "''' 将nii转化成PNG :param volume_path: nii的路径 :param seg_path: :return: ''' from", "minval # scale down to 0 - 2 image /=", "= {} res_dict['voxel_spacing'] = [header_obj['srow_x'][0], header_obj['srow_y'][1], header_obj['srow_z'][2]] img_arr = img_obj.get_data()", "def read_patientId(dicom_file_path): ds = pydicom.read_file(dicom_file_path) return ds.PatientID # 返回病灶类型和ID的字典类型的数据 key是typename", "object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node = doc.createElement('bndbox') object_node.appendChild(bndbox_node) xmin_node = doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y)))", "kernel_size)) close_r = cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel) return close_r def open_operation(slice_image,", "# pre_slice = np.mean(pre_slice, axis=0, keepdims=True) # next_slice = np.mean(next_slice,", "kernel_bottom[:half_size, :] = 0 return np.concatenate([ np.expand_dims(kernel_whole, axis=0), np.expand_dims(kernel_left, axis=0),", "image = cv2.dilate(img, kernel) return image def image_erode(img, kernel_size=5): kernel", "# 根据病灶类型的ID返回类型的字符串 def return_typename_byid(typeid): idname_dict = return_type_idname() return idname_dict[typeid] #", "img = np.asarray(image, np.uint8) import matplotlib.pyplot as plt plt.figure(\"Image\") #", "/ 2) # zoom desired_size = [size_training, size_training] desired_size =", "'/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename, # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename + '.mhd' #", "[] tumor_weakly_masks = [] liver_masks = [] i = slice_num", "test_convert2depthfirst(): # zeros = np.zeros([100, 100, 30]) # after_zeros =", "return image[xs_min: xs_max + 1, ys_min: ys_max + 1] def", "ys = np.where(mask_image == 1) print(xs, ys) min_x = np.min(xs)", "value) print(static_res) def 
convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0, short_edge=64): ''' 将nii转化成PNG", "= np.asarray(np.squeeze(mask_image), np.uint8) if phase_name == target_phase: target_mask = mask_image", "kernel_size] ''' kernel_whole = np.ones([kernel_size, kernel_size], np.uint8) half_size = kernel_size", "os.path.join(save_dir, phase_name, dataset_name, file_name+'.jpg') if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape", "print(np.shape(image)) # conver2JPG single phase # image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' #", "统计每个case,有多少slice具有病灶 :param nii_dir: :return: ''' mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii')) for", "= nipy.load_image(file_path) header_obj = img_obj.header res_dict = {} res_dict['voxel_spacing'] =", "255. image -= minval interv = maxval - minval #", "np.copy(kernel_whole) kernel_left[:, half_size + 1:] = 0 kernel_right = np.copy(kernel_whole)", "-300. top = 500. image = np.clip(image, bottom, top) #", "max_v volume[volume < min_v] = min_v volume -= np.mean(volume) min_v", "kernel = np.ones((kernel_size, kernel_size), np.uint8) erosion = cv2.erode(img, kernel, iterations=1)", "next_slice = [] # next_start = i + slice_num /", "save_dir, phase_name='PV'): mhd_image_path = glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0] mhd_mask_path = glob(os.path.join(slice_dir, phase_name", "print np.max(binary_seg_slice) masks.append(binary_seg_slice) labeled_mask = label(binary_seg_slice) weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8)", "= np.clip(image, c_minimum, c_maximum) interv = float(c_maximum - c_minimum) image", "image_arr_rgb[miny - 1:maxy + 1, minx - 1:maxx + 1,", "np.min(ys) maxy = np.max(ys) minx = np.min(xs) maxx = np.max(xs)", "# pre_slice.append(volume[:, :, z]) if (i - 1) < 0:", "False max_v = 300. min_v = -350. 
mhd_image[mhd_image > max_v]", "cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel) return close_r def open_operation(slice_image, kernel_size=3): opening =", "axis=0), axes=[1, 2, 0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' + str(i)) binary_seg_slice", "suffix_name == 'npy': mhd_images = np.concatenate(np.asarray(mhd_images, np.float), axis=0) mhd_images =", "!= 1] = 0 xs, ys = np.where(mask_image == 1)", "typenames = ['CYST', 'FNH', 'HCC', 'HEM', 'METS'] typeids = [0,", "max_y, max_x) print(line) lines = [] lines.append(line) with open(gt_save_path, 'w')", "image = read_nil(path) # print(np.shape(image)) # conver2JPG single phase #", "encoding='utf-8')) line = '%s %d %d %d %d\\n' % ('Cyst',", "# 读取mhd文件 def read_mhd_image(file_path, rejust=False): header = itk.ReadImage(file_path) image =", "convert_dicomseries2mhd(dicom_series_dir, save_path): data = read_dicom_series(dicom_series_dir) save_mhd_image(data, save_path) # 读取单个DICOM文件 def", "lines.append(line) with open(gt_save_path, 'w') as f: f.writelines(lines) f.close() def static_pixel_num(image_dir,", "''' 将形式如(512, 512)格式的图像转化为(1, 512, 512)形式的图片 ''' def expand23D(mask_image): shape =", "print('the shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) #cv2.imwrite(save_path,", "= i - slice_num / 2 # pre_end = i", "5: 705482} # {0: 1.0, 156, 1041, 146, 475, 308}", "scaler 0', interv) # scale down to 0 - 2", "0 xs, ys = np.where(mask_image == 1) min_x = np.min(xs)", "1): # z = next_start + j # if z", "- min_v) / interv z_axis_case = header['voxel_spacing'][-1] slice_num = int(z_axis", "= nipy.load_image(file_path) header = load_image_obj.header x_size = header['srow_x'][0] y_size =", "- min_v mhd_image = (mhd_image - min_v) / interv file_name", "'ART', 'PV'], target_phase='PV', suffix_name='npy'): target_mask = None mhd_images = []", "image is not support in this version') return None def", "4] def get_voxel_size(file_path): load_image_obj = 
nipy.load_image(file_path) header = load_image_obj.header x_size", "Image.fromarray(np.asarray(image_arr, np.uint8)) image.save(save_path) def show_image(image): img = np.asarray(image, np.uint8) import", "slice_num + 1 pos_slice_num = np.sum(np.sum(np.sum(seg == 2, axis=0), axis=0)", "is not support in this version') return None def processing(image,", ":, i] = image[i, :, :] return new_image def read_image_file(file_path):", "np.uint8)) tumor_weakly_masks.append(weakly_label_mask) # i += 1 return np.asarray(imgs, np.float32), np.asarray(masks,", "def return_type_nameid(): res = {} res['CYST'] = 0 res['FNH'] =", "1.0 for i in range(channel): seg_slice = seg[:, :, i]", "# z = next_start + j # if z >=", "2 # pre_end = i # for j in range(1,", "difficult_node = doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node = doc.createElement('bndbox') object_node.appendChild(bndbox_node) xmin_node", "/ interv file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) save_path =", "- min_v) / interv file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir))", "# 读取单个DICOM文件 def read_dicom_file(file_name): header = pydicom.read_file(file_name) image = header.pixel_array", "interpolation return after_zoom def preprocessing_agumentation(image, size_training): image = np.array(image) #", "'HEM' res[4] = 'METS' return res # 根据病灶类型的ID返回类型的字符串 def return_typename_byid(typeid):", "with open(gt_save_path, 'w') as f: f.writelines(lines) f.close() def static_pixel_num(image_dir, target_phase='PV'):", "70 return np.array(image) # 保存mhd文件 def save_mhd_image(image, file_name): header =", "', pos_slice_num, total_slice_num) neg_rate = (3.0 * pos_slice_num) / total_slice_num", "mhd_image], axis=2) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) max_v = 300. 
min_v", "itk.GetImageFromArray(image) itk.WriteImage(header, file_name) # 根据文件名返回期项名 def return_phasename(file_name): phasenames = ['NC',", "read_nii_with_header(volume_path) # volume = np.transpose(volume, [1, 0, 2]) volume =", "file_name): header = itk.GetImageFromArray(image) itk.WriteImage(header, file_name) # 根据文件名返回期项名 def return_phasename(file_name):", "min_x, max_y, max_x) print(line) lines = [] lines.append(line) with open(gt_save_path,", "/ interv file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) phase_name =", "= itk.GetImageFromArray(image) itk.WriteImage(header, file_name) # 根据文件名返回期项名 def return_phasename(file_name): phasenames =", "np.sum(mask == value) print(static_res) def convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0, short_edge=64):", "np.expand_dims(volume[:, :, i+1], axis=0) # pre_slice = np.mean(pre_slice, axis=0, keepdims=True)", "numpy as np kernel = np.ones((kernel_size, kernel_size), np.uint8) erosion =", "mhd_images elif suffix_name == 'npy': mhd_images = np.concatenate(np.asarray(mhd_images, np.float), axis=0)", "np.asarray(np.sum(np.sum(mask_img == 2, axis=0), axis=0)>0, np.bool) num_lesion_slices = np.sum(has_lesion) print", "0, 3: 0, 4: 0, 5: 0 } from convert2jpg", "/ np.array(image[:, :, 0].shape, dtype=np.float) print(zooms) after_zoom = np.zeros([size_training, size_training,", "2: mask_image = np.expand_dims(mask_image, axis=0) print('after expand23D', np.shape(mask_image)) return mask_image", "[5, kernel_size, kernel_size] ''' kernel_whole = np.ones([kernel_size, kernel_size], np.uint8) half_size", "neg_rate = 1.0 for i in range(channel): seg_slice = seg[:,", "target_phase: target_mask = mask_image print(np.shape(mhd_images)) mask_image = target_mask mask_image_shape =", "print(xs, ys) min_x = np.min(xs) min_y = np.min(ys) max_x =", "reader.Execute() image_array = itk.GetArrayFromImage(images) return image_array # 将DICOM序列转化成MHD文件 def 
convert_dicomseries2mhd(dicom_series_dir,", "for evulate doc = Document() root_node = doc.createElement('annotation') doc.appendChild(root_node) folder_name", "max_ys] = 1 liver_masks.append(np.asarray(seg_slice == 1, np.uint8)) tumor_weakly_masks.append(weakly_label_mask) # i", "img_obj = nipy.load_image(file_path) header_obj = img_obj.header res_dict = {} res_dict['voxel_spacing']", "shape = list(np.shape(mask_image)) if len(shape) == 2: mask_image = np.expand_dims(mask_image,", "ys = np.where(mask_image == 1) xs_min = np.min(xs) xs_max =", "> max_v] = max_v volume[volume < min_v] = min_v volume", "= header['voxel_spacing'][-1] slice_num = int(z_axis / z_axis_case) if slice_num ==", "# 这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img, cmap='gray') plt.axis('on') plt.title('image') plt.show() #", "'_ROI.jpg')) del image, ROI_Image gc.collect() def compress22dim(image): ''' 将一个矩阵如果可能,压缩到三维的空间 '''", "= ['NC', 'ART', 'PV'] for phasename in phasenames: if file_name.find(phasename)", "''' def find_centroid3D(image, flag): [x, y, z] = np.where(image ==", "(kernel_size, kernel_size))) return opening def get_kernel_filters(kernel_size): ''' 返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param", "np.shape(mask_image)) if suffix_name == 'jpg': mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1,", "in range(1, np.max(labeled_mask) + 1): xs, ys = np.where(labeled_mask ==", "dataset_name+'_gt') if not os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir) xml_save_path = os.path.join(xml_save_dir, file_name +", "images = reader.Execute() image_array = itk.GetArrayFromImage(images) return image_array # 将DICOM序列转化成MHD文件", "for mask_nii_path in mask_nii_paths: mask_img = read_nii(mask_nii_path) has_lesion = np.asarray(np.sum(np.sum(mask_img", "return opening def get_kernel_filters(kernel_size): ''' 返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param kernel_size: :return:", "= 
os.path.join(save_dir, phase_name, dataset_name+'_gt') if not os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir) xml_save_path =", "kernel_size // 2 kernel_left = np.copy(kernel_whole) kernel_left[:, half_size + 1:]", "cv2.erode(img, kernel, iterations=1) return erosion def image_expand(img, kernel_size=5): kernel =", "c_minimum = -300. c_maximum = 500. s_maximum = 255. image", "= image_arr_rgb[miny - 1:maxy + 1, minx - 1:maxx +", "bndbox_node.appendChild(ymin_node) xmax_node = doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node = doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x)))", "elif suffix_name == 'npy': mhd_images = np.concatenate(np.asarray(mhd_images, np.float), axis=0) mhd_images", "- 2 # image /= (interv / 2) image =", "axis=0), ], axis=0) def image_erode(img, kernel_size=5): import cv2 import numpy", "os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir) xml_save_path = os.path.join(xml_save_dir, file_name + '.xml') gt_save_path =", "= cv2.erode(img, kernel, iterations=1) return erosion def image_expand(img, kernel_size=5): kernel", "xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node = doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node = doc.createElement('xmax')", "import numpy as np from PIL import Image, ImageDraw import", "minval interv = maxval - minval # print('static scaler 0',", "max_v] = max_v volume[volume < min_v] = min_v volume -=", "volume_path: nii的路径 :param seg_path: :return: ''' from skimage.measure import label", "= doc.createElement('truncated') object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node = doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node", 
"dicom2jpg_singlephase, save_dir, phase_name) # conver2JPG multi phase # image_dir =", "= doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node = doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node)", "0 res['FNH'] = 1 res['HCC'] = 2 res['HEM'] = 3", "convert_dicomseries2mhd( # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename, # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename +", "zooms = desired_size / np.array(image[:, :, 0].shape, dtype=np.float) print(zooms) after_zoom", "mask image shape is ', np.shape(mask_image)) if suffix_name == 'jpg':", "2 image /= (interv / 2) # zoom desired_size =", "(interv / 2) # zoom desired_size = [size_training, size_training] desired_size", "scipy import cv2 from xml.dom.minidom import Document typenames = ['CYST',", "[1, 0, 2]) volume = np.asarray(volume, np.float32) max_v = 250.", "== 1) xs_min = np.min(xs) xs_max = np.max(xs) ys_min =", "= [] i = slice_num + 1 pos_slice_num = np.sum(np.sum(np.sum(seg", "fill_region(image): # image.show() from scipy import ndimage image = ndimage.binary_fill_holes(image).astype(np.uint8)", "np.unique(mask) print pixel_value_set for value in list(pixel_value_set): static_res[value] += np.sum(mask", "= mhd_images else: print('the suffix name does not support') assert", "np.min(mhd_image) max_v = np.max(mhd_image) interv = max_v - min_v mhd_image", "+= 1 return np.asarray(imgs, np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8), np.asarray(", "if not os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir) xml_save_path = os.path.join(xml_save_dir, file_name + '.xml')", "neg_rate > 1.0: neg_rate = 1.0 for i in range(channel):", "numpy_clip bottom = -300. top = 500. 
image = np.clip(image,", "1 return np.asarray(imgs, np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8), np.asarray( tumor_weakly_masks,", "name) execute_func(cur_slice_dir, *parameters) def dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'): mhd_image_path = glob(os.path.join(slice_dir,", "def return_type_idname(): res = {} res[0] = 'CYST' res[1] =", ">= channel: next_slice = np.expand_dims(volume[:, :, i], axis=0) else: next_slice", "np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image = mhd_images elif suffix_name", "np.shape(mask_img)[-1] if __name__ == '__main__': # for phasename in ['NC',", "mask_image print(np.shape(mhd_images)) mask_image = target_mask mask_image_shape = list(np.shape(mask_image)) if len(mask_image_shape)", "h] ''' def convert2depthfirst(image): image = np.array(image) shape = np.shape(image)", "shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) cv2.imwrite(save_path, mhd_image", "kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size)) close_r = cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel)", "pixel2type, type2pixel for sub_name in ['train', 'val', 'test']: names =", "np.zeros([shape[2], shape[0], shape[1]]) for i in range(shape[2]): new_image[i, :, :]", "{ 0: 0, 1: 0, 2: 0, 3: 0, 4:", "slice_num == 0: slice_num = 1 seg = read_nii(seg_path) #", "= 1 liver_masks.append(np.asarray(seg_slice == 1, np.uint8)) tumor_weakly_masks.append(weakly_label_mask) # i +=", "np.asarray( tumor_weakly_masks, np.uint8) def statics_num_slices_lesion(nii_dir): ''' 统计每个case,有多少slice具有病灶 :param nii_dir: :return:", "name_node = doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node = doc.createElement('truncated') object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0'))", "name) # image = read_nil(path) # print(np.shape(image)) # conver2JPG 
single", "= np.expand_dims(volume[:, :, i+1], axis=0) # pre_slice = np.mean(pre_slice, axis=0,", ":param volume_path: nii的路径 :param seg_path: :return: ''' from skimage.measure import", "['CYST', 'FNH', 'HCC', 'HEM', 'METS'] typeids = [0, 1, 2,", "2, axis=0), axis=0) != 0) total_slice_num = np.shape(seg)[-1] print('pos_slice_num is", "def MICCAI2018_Iterator(image_dir, execute_func, *parameters): ''' 遍历MICCAI2018文件夹的框架 :param execute_func: :return: '''", "保存mhd文件 def save_mhd_image(image, file_name): header = itk.GetImageFromArray(image) itk.WriteImage(header, file_name) #", "from config import pixel2type, type2pixel for sub_name in ['train', 'val',", ":] = 0 kernel_bottom = np.copy(kernel_whole) kernel_bottom[:half_size, :] = 0", "np.zeros([shape[1], shape[2], shape[0]]) for i in range(shape[0]): new_image[:, :, i]", "= os.path.join(save_dir, phase_name, dataset_name, file_name+'.jpg') if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the", "header = itk.ReadImage(file_path) image = np.array(itk.GetArrayFromImage(header)) if rejust: image[image <", "= os.listdir(os.path.join(image_dir, sub_name)) for name in names: cur_slice_dir = os.path.join(image_dir,", "res[3] = 'HEM' res[4] = 'METS' return res # 根据病灶类型的ID返回类型的字符串", "sub_name, name) mhd_mask_path = glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' % target_phase))[0] mask_image =", "kernel_bottom = np.copy(kernel_whole) kernel_bottom[:half_size, :] = 0 return np.concatenate([ np.expand_dims(kernel_whole,", "np.where(mask_image == 1) print(xs, ys) min_x = np.min(xs) min_y =", "np.sum(np.sum(mask_image, axis=1), axis=1) return np.argmax(sum_res) # 将一个矩阵保存为图片 def save_image(image_arr, save_path):", "image[:, :, i] return new_image # def test_convert2depthfirst(): # zeros", "# 图像膨胀 # def image_expand(image, size): # def find_significant_layer(mask_image): '''", "return [x_size, y_size, z_size] def read_nii(file_path): return nipy.load_image(file_path).get_data() def 
read_nii_with_header(file_path):", "not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image is ', np.shape(mhd_image),", "in this version') return None def processing(image, size_training): image =", "reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series) images = reader.Execute() image_array = itk.GetArrayFromImage(images) return image_array", "image = header.pixel_array image = header.RescaleSlope * image + header.RescaleIntercept", "if __name__ == '__main__': # for phasename in ['NC', 'ART',", "# for j in range(1, slice_num + 1): # z", "res = {} res['CYST'] = 0 res['FNH'] = 1 res['HCC']", "# 返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename def return_type_idname(): res = {} res[0]", "- c_minimum) / interv * s_maximum minval = 0. maxval", "= seg[:, :, i] mid_slice = np.expand_dims(volume[:, :, i], axis=0)", "np.max(mhd_image) interv = max_v - min_v mhd_image = (mhd_image -", "mhd_image = mhd_images elif suffix_name == 'npy': mhd_images = np.concatenate(np.asarray(mhd_images,", "== 1, np.uint8)) tumor_weakly_masks.append(weakly_label_mask) # i += 1 return np.asarray(imgs,", "5: 0 } from convert2jpg import extract_bboxs_mask_from_mask from config import", "else: next_slice = np.expand_dims(volume[:, :, i+1], axis=0) # pre_slice =", "'METS'] typeids = [0, 1, 2, 3, 4] def get_voxel_size(file_path):", "= image_arr image = Image.fromarray(np.asarray(image_arr_rgb, np.uint8)) image_draw = ImageDraw.Draw(image) [ys,", "np.min(xs) max_xs = np.max(xs) min_ys = np.min(ys) max_ys = np.max(ys)", "size_training): image = np.array(image) # numpy_clip c_minimum = -300. 
c_maximum", "max_v] = max_v mhd_image[mhd_image < min_v] = min_v print(np.mean(mhd_image, dtype=np.float32))", "= np.copy(kernel_whole) kernel_right[:, :half_size] = 0 kernel_top = np.copy(kernel_whole) kernel_top[half_size", "image = header.RescaleSlope * image + header.RescaleIntercept return image #", "order = 1 => biliniear interpolation return after_zoom def MICCAI2018_Iterator(image_dir,", "doc.createElement('size') root_node.appendChild(size_node) width_node = doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node = doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1])))", "in list(pixel_value_set): static_res[value] += np.sum(mask == value) print(static_res) def convertCase2PNGs(volume_path,", "def processing(image, size_training): image = np.array(image) # numpy_clip bottom =", "list(np.shape(image)) if len(shape) == 3: return np.squeeze(image) return image def", "np.asarray(desired_size, dtype=np.int) zooms = desired_size / np.array(image[:, :, 0].shape, dtype=np.float)", "Document typenames = ['CYST', 'FNH', 'HCC', 'HEM', 'METS'] typeids =", "Document() root_node = doc.createElement('annotation') doc.appendChild(root_node) folder_name = os.path.basename(save_dir) + '/'", "''' 将[w, h, d]reshape为[d, w, h] ''' def convert2depthfirst(image): image", "pydicom.read_file(dicom_file_path) return ds.PatientID # 返回病灶类型和ID的字典类型的数据 key是typename value是typeid def return_type_nameid(): res", "key] = type2pixel[pixel2type[key]][0] pixel_value_set = np.unique(mask) print pixel_value_set for value", "= itk.GetArrayFromImage(images) return image_array # 将DICOM序列转化成MHD文件 def convert_dicomseries2mhd(dicom_series_dir, save_path): data", "= cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size)) close_r = cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel) return", "np.asarray(np.squeeze(mask_image), np.uint8) max_v = 300. min_v = -350. 
mhd_image[mhd_image >", "suffix name does not support') assert False max_v = 300.", "= np.expand_dims(volume[:, :, i], axis=0) else: next_slice = np.expand_dims(volume[:, :,", "= return_type_nameid() return nameid_dict[typename] # 填充图像 def fill_region(image): # image.show()", "y_size = header['srow_y'][1] z_size = header['srow_z'][2] return [x_size, y_size, z_size]", "-350 interv = 500 - (-350) image -= minval #", "=> biliniear interpolation return after_zoom def MICCAI2018_Iterator(image_dir, execute_func, *parameters): '''", "= os.path.join(save_dir, phase_name, dataset_name, file_name+'.' + suffix_name) if not os.path.exists(os.path.dirname(save_path)):", "object_node.appendChild(bndbox_node) xmin_node = doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node = doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x)))", "== 3: mask_image = mask_image[1, :, :] print('the mask image", "cv2 from xml.dom.minidom import Document typenames = ['CYST', 'FNH', 'HCC',", "= maxval - minval # print('static scaler 0', interv) #", "= np.min(mhd_image) max_v = np.max(mhd_image) interv = max_v - min_v", "np.min(ys) max_x = np.max(xs) max_y = np.max(ys) object_node = doc.createElement('object')", "i], zooms, order=1) # order = 1 => biliniear interpolation", "config import pixel2type, type2pixel for sub_name in ['train', 'val', 'test']:", "image = np.array(image) # numpy_clip c_minimum = -300. 
c_maximum =", "height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node = doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image !=", "'.txt') # for evulate doc = Document() root_node = doc.createElement('annotation')", "= 180 image_arr = image_arr + 70 shape = list(np.shape(image_arr))", "iterations=1) return erosion def image_expand(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size,", "convert2depthfirst(zeros) # print np.shape(after_zeros) # test_convert2depthfirst() ''' 将[d, w, h]reshape为[w,", "< -70] = -70 image_arr[image_arr > 180] = 180 image_arr", "mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii')) for mask_nii_path in mask_nii_paths: mask_img =", "'__main__': # for phasename in ['NC', 'ART', 'PV']: # convert_dicomseries2mhd(", "order=1) # order = 1 => biliniear interpolation return after_zoom", "= np.where(labeled_mask == idx) min_xs = np.min(xs) max_xs = np.max(xs)", "np.uint8)) image.save(save_path) def show_image(image): img = np.asarray(image, np.uint8) import matplotlib.pyplot", "mask = extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir, 'tumor_types')) for key in pixel2type.keys(): mask[mask", "= 'METS' return res # 根据病灶类型的ID返回类型的字符串 def return_typename_byid(typeid): idname_dict =", "def fill_region(image): # image.show() from scipy import ndimage image =", "interv = float(c_maximum - c_minimum) image = (image - c_minimum)", "coding=utf-8 -*- import SimpleITK as itk import pydicom import numpy", "save_path): image_arr[image_arr < -70] = -70 image_arr[image_arr > 180] =", "root_node = doc.createElement('annotation') doc.appendChild(root_node) folder_name = os.path.basename(save_dir) + '/' +", "0 - 2 # image /= (interv / 2) image", "return dilation(mask_image, disk(r)) ''' 将形式如(512, 512)格式的图像转化为(1, 512, 512)形式的图片 ''' def", 
"opening = cv2.morphologyEx(slice_image, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))) return opening def", "os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt') if not os.path.exists(evulate_gt_dir):", "''' from skimage.measure import label volume, header = read_nii_with_header(volume_path) #", "'-' + str(i)) binary_seg_slice = np.asarray(seg_slice == 2, np.uint8) #", "0 } from convert2jpg import extract_bboxs_mask_from_mask from config import pixel2type,", "def read_nii(file_path): return nipy.load_image(file_path).get_data() def read_nii_with_header(file_path): img_obj = nipy.load_image(file_path) header_obj", "'/' + phase_name folder_node = doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node = doc.createTextNode(folder_name)", "= reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series) images = reader.Execute() image_array = itk.GetArrayFromImage(images) return", "min_v) / interv file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) save_path", "res[2] = 'HCC' res[3] = 'HEM' res[4] = 'METS' return", "image + 70 return np.array(image) # 保存mhd文件 def save_mhd_image(image, file_name):", "= np.max(ys) return image[xs_min: xs_max + 1, ys_min: ys_max +", "2 # next_start = i # for j in range(1,", "new_image = np.zeros([shape[1], shape[2], shape[0]]) for i in range(shape[0]): new_image[:,", "np.float32) image = image / interv image = image *", "np.shape(seg)[-1] print('pos_slice_num is ', pos_slice_num, total_slice_num) neg_rate = (3.0 *", "str(i)) binary_seg_slice = np.asarray(seg_slice == 2, np.uint8) # print np.max(binary_seg_slice)", ":, i] return new_image # def test_convert2depthfirst(): # zeros =", "header_obj['srow_y'][1], header_obj['srow_z'][2]] img_arr = img_obj.get_data() return img_arr, res_dict # 读取文件序列", "= max_v - min_v mhd_image = (mhd_image - 
min_v) /", "3]) image_arr_rgb[:, :, 0] = image_arr image_arr_rgb[:, :, 1] =", "os.path.join(image_dir, sub_name, name) mhd_mask_path = glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' % target_phase))[0] mask_image", "plt.title('image') plt.show() # 将图像画出来,并且画出标记的病灶 def save_image_with_mask(image_arr, mask_image, save_path): image_arr[image_arr <", "'Image_%s*.mhd' % phase_name))[0] mhd_mask_path = glob(os.path.join(slice_dir, 'Mask_%s*.mhd' % phase_name))[0] mhd_image", "# next_slice = np.mean(next_slice, axis=0, keepdims=True) imgs.append( np.transpose(np.concatenate([pre_slice, mid_slice, next_slice],", "file_name + '.xml') gt_save_path = os.path.join(evulate_gt_dir, file_name + '.txt') #", "os.path.basename(os.path.dirname(slice_dir)) save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.jpg') if not os.path.exists(os.path.dirname(save_path)):", "save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.jpg') if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path))", "= np.asarray(np.sum(np.sum(mask_img == 2, axis=0), axis=0)>0, np.bool) num_lesion_slices = np.sum(has_lesion)", "- j # if z < 0: # z =", "res['HCC'] = 2 res['HEM'] = 3 res['METS'] = 4 return", "705482} # {0: 1.0, 156, 1041, 146, 475, 308} static_res", "np.squeeze(image) return image def extract_ROI(image, mask_image): ''' 提取一幅图像中的ROI ''' xs,", "np.asarray(imgs, np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8), np.asarray( tumor_weakly_masks, np.uint8) def", "interv file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) save_path = os.path.join(save_dir,", "mhd_images = [] for phase_name in phasenames: mhd_image_path = glob(os.path.join(slice_dir,", ":, z]) if (i + 1) >= channel: next_slice =", "= extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir, 'tumor_types')) for key in pixel2type.keys(): mask[mask ==", "- 1 # next_slice.append(volume[:, :, 
z]) if (i + 1)", "== key] = type2pixel[pixel2type[key]][0] pixel_value_set = np.unique(mask) print pixel_value_set for", "'CYST' res[1] = 'FNH' res[2] = 'HCC' res[3] = 'HEM'", "image shape is ', np.shape(mask_image)) if suffix_name == 'jpg': mhd_images", "glob(os.path.join(nii_dir, 'segmentation-*.nii')) for mask_nii_path in mask_nii_paths: mask_img = read_nii(mask_nii_path) has_lesion", "# i += 1 return np.asarray(imgs, np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks,", "# -*- coding=utf-8 -*- import SimpleITK as itk import pydicom", "def test_convert2depthfirst(): # zeros = np.zeros([100, 100, 30]) # after_zeros", "image + header.RescaleIntercept return image # 读取mhd文件 def read_mhd_image(file_path, rejust=False):", "shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) #cv2.imwrite(save_path, mhd_image", "os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') # for name in names: # path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2',", "size): image = Image.fromarray(np.asarray(image, np.uint8)) return image.resize((size, size)) # def", "= 500 - (-350) image -= minval # scale down", "image_erode(img, kernel_size=5): import cv2 import numpy as np kernel =", "0 return np.concatenate([ np.expand_dims(kernel_whole, axis=0), np.expand_dims(kernel_left, axis=0), np.expand_dims(kernel_right, axis=0), np.expand_dims(kernel_top,", "image_array = itk.GetArrayFromImage(images) return image_array # 将DICOM序列转化成MHD文件 def convert_dicomseries2mhd(dicom_series_dir, save_path):", "doc.createElement('truncated') object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node = doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node =", "centroid_z = int(np.mean(z)) return centroid_x, centroid_y, centroid_z ''' 将[w, 
h,", "* 2.0 # zoom desired_size = [size_training, size_training] desired_size =", "centroid_x, centroid_y, centroid_z ''' 将[w, h, d]reshape为[d, w, h] '''", "for sub_name in ['train', 'val', 'test']: names = os.listdir(os.path.join(image_dir, sub_name))", "os.path.join(save_dir, phase_name, dataset_name+'_gt') if not os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir) xml_save_path = os.path.join(xml_save_dir,", "float(c_maximum - c_minimum) image = (image - c_minimum) / interv", "return_type_nameid() return nameid_dict[typename] # 填充图像 def fill_region(image): # image.show() from", "if len(shape) == 2: mask_image = np.expand_dims(mask_image, axis=0) print('after expand23D',", "1:, :] = 0 kernel_bottom = np.copy(kernel_whole) kernel_bottom[:half_size, :] =", "# next_start = i + slice_num / 2 # next_start", "3 res['METS'] = 4 return res # 返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename", "shape is ', np.shape(mask_image)) if suffix_name == 'jpg': mhd_images =", "mask[mask == key] = type2pixel[pixel2type[key]][0] pixel_value_set = np.unique(mask) print pixel_value_set", "keepdims=True) imgs.append( np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0), axes=[1, 2, 0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1]", "# for phasename in ['NC', 'ART', 'PV']: # convert_dicomseries2mhd( #", "phase_name == target_phase: target_mask = mask_image print(np.shape(mhd_images)) mask_image = target_mask", "read_dicom_series(dicom_series_dir) save_mhd_image(data, save_path) # 读取单个DICOM文件 def read_dicom_file(file_name): header = pydicom.read_file(file_name)", "# names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') # for name in names: #", "= 300. min_v = -350. 
mhd_image[mhd_image > max_v] = max_v", "sum_res = np.sum(np.sum(mask_image, axis=1), axis=1) return np.argmax(sum_res) # 将一个矩阵保存为图片 def", "image = np.asarray(image, np.float32) image = image / interv image", "else: image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0] + '_ROI.jpg')) del image, ROI_Image gc.collect()", "2, axis=0), axis=0)>0, np.bool) num_lesion_slices = np.sum(has_lesion) print os.path.basename(mask_nii_path), num_lesion_slices,", "mhd_images.append(mhd_image) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) if phase_name == target_phase: target_mask", "(kernel_size, kernel_size)) close_r = cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel) return close_r def", "{} res_dict['voxel_spacing'] = [header_obj['srow_x'][0], header_obj['srow_y'][1], header_obj['srow_z'][2]] img_arr = img_obj.get_data() return", "file_name) # 根据文件名返回期项名 def return_phasename(file_name): phasenames = ['NC', 'ART', 'PV']", "mhd_image[mhd_image < min_v] = min_v print(np.mean(mhd_image, dtype=np.float32)) mhd_image -= np.mean(mhd_image)", "[x, y, z] = np.where(image == flag) centroid_x = int(np.mean(x))", "axis=2) mhd_image = np.concatenate([mhd_image, mhd_image, mhd_image], axis=2) mask_image = np.asarray(np.squeeze(mask_image),", "def compress22dim(image): ''' 将一个矩阵如果可能,压缩到三维的空间 ''' shape = list(np.shape(image)) if len(shape)", "return centroid_x, centroid_y, centroid_z ''' 将[w, h, d]reshape为[d, w, h]", "np.shape(image)[2]]) for i in range(np.shape(after_zoom)[2]): after_zoom[:, :, i] = scipy.ndimage.zoom(image[:,", "as f: f.writelines(lines) f.close() def dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC', 'ART', 'PV'],", "min_v) / interv file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) phase_name", "(volume - min_v) / interv z_axis_case = header['voxel_spacing'][-1] slice_num =", "bndbox_node = doc.createElement('bndbox') 
object_node.appendChild(bndbox_node) xmin_node = doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node", "1:maxy + 1, minx - 1:maxx + 1, :] ROI_Image", "np.where(mask_image != 0) miny = np.min(ys) maxy = np.max(ys) minx", "for idx in range(1, np.max(labeled_mask) + 1): xs, ys =", "if z < 0: # z = 0 # pre_slice.append(volume[:,", "os.path.join(cur_slice_dir, 'tumor_types')) for key in pixel2type.keys(): mask[mask == key] =", "= 0. maxval = 255. image -= minval interv =", "float minval = -350 interv = 500 - (-350) image", "itk import pydicom import numpy as np from PIL import", "def close_operation(binary_image, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size)) close_r =", "= os.path.join(save_dir, phase_name, dataset_name+'_xml') if not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir =", "= mhd_images elif suffix_name == 'npy': mhd_images = np.concatenate(np.asarray(mhd_images, np.float),", "-= np.mean(mhd_image) min_v = np.min(mhd_image) max_v = np.max(mhd_image) interv =", "= np.array(image) shape = np.shape(image) new_image = np.zeros([shape[1], shape[2], shape[0]])", "root_node.appendChild(size_node) width_node = doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node = doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node", "# volume = np.transpose(volume, [1, 0, 2]) volume = np.asarray(volume,", "* 255) np.save(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir, phase_name,", "ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with open(xml_save_path, 'wb') as f: f.write(doc.toprettyxml(indent='\\t', encoding='utf-8')) line", "zoom desired_size = [size_training, size_training] desired_size = np.asarray(desired_size, dtype=np.int) zooms", "500. 
image = np.clip(image, bottom, top) # to float minval", "is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) #cv2.imwrite(save_path, mhd_image * 255) np.save(save_path,", "MICCAI2018_Iterator(image_dir, execute_func, *parameters): ''' 遍历MICCAI2018文件夹的框架 :param execute_func: :return: ''' for", "= image[i, :, :] return new_image def read_image_file(file_path): if file_path.endswith('.nii'):", "= Image.fromarray(np.asarray(image_arr, np.uint8)) image.save(save_path) def show_image(image): img = np.asarray(image, np.uint8)", "header['srow_z'][2] return [x_size, y_size, z_size] def read_nii(file_path): return nipy.load_image(file_path).get_data() def", "y, z] = np.where(image == flag) centroid_x = int(np.mean(x)) centroid_y", "def expand23D(mask_image): shape = list(np.shape(mask_image)) if len(shape) == 2: mask_image", "2 kernel_left = np.copy(kernel_whole) kernel_left[:, half_size + 1:] = 0", "np.expand_dims(volume[:, :, i], axis=0) pre_slice = [] # pre_end =", "idname_dict[typeid] # 根据病灶类型的name返回id的字符串 def return_typeid_byname(typename): nameid_dict = return_type_nameid() return nameid_dict[typename]", "= np.shape(image) new_image = np.zeros([shape[1], shape[2], shape[0]]) for i in", "def read_nii_with_header(file_path): img_obj = nipy.load_image(file_path) header_obj = img_obj.header res_dict =", "= os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) phase_name = ''.join(phasenames) save_path =", "== target_phase: target_mask = mask_image print(np.shape(mhd_images)) mask_image = target_mask mask_image_shape", "idx) min_xs = np.min(xs) max_xs = np.max(xs) min_ys = np.min(ys)", "print('the suffix name does not support') assert False max_v =", "+ '_ROI.jpg')) del image, ROI_Image gc.collect() def compress22dim(image): ''' 将一个矩阵如果可能,压缩到三维的空间", "1): xs, ys = np.where(labeled_mask == idx) min_xs = np.min(xs)", "flag): [x, y, z] = np.where(image == flag) centroid_x =", "-70] = -70 image_arr[image_arr > 180] = 180 image_arr 
=", "image_arr_rgb[:, :, 2] = image_arr image = Image.fromarray(np.asarray(image_arr_rgb, np.uint8)) image_draw", "= np.concatenate(np.asarray(mhd_images, np.float), axis=0) mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2,", "close_r def open_operation(slice_image, kernel_size=3): opening = cv2.morphologyEx(slice_image, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size,", "axis=1) return np.argmax(sum_res) # 将一个矩阵保存为图片 def save_image(image_arr, save_path): image =", "truncated_node = doc.createElement('truncated') object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node = doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0'))", "y_size, z_size] def read_nii(file_path): return nipy.load_image(file_path).get_data() def read_nii_with_header(file_path): img_obj =", "i in range(shape[2]): new_image[i, :, :] = image[:, :, i]", "glob(os.path.join(slice_dir, 'Image_%s*.mhd' % phase_name))[0] mhd_mask_path = glob(os.path.join(slice_dir, 'Mask_%s*.mhd' % phase_name))[0]", "/ interv z_axis_case = header['voxel_spacing'][-1] slice_num = int(z_axis / z_axis_case)", "kernel_left = np.copy(kernel_whole) kernel_left[:, half_size + 1:] = 0 kernel_right", "size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image != 1] = 0 xs, ys", "expand23D', np.shape(mask_image)) return mask_image ''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 ''' def find_centroid3D(image, flag):", "image_expand(image, size): # def find_significant_layer(mask_image): ''' 找到显著层 :param mask_image: [depth,", "def get_voxel_size(file_path): load_image_obj = nipy.load_image(file_path) header = load_image_obj.header x_size =", "500 - (-350) image -= minval # scale down to", "j in range(1, slice_num + 1): # z = next_start", "width_node = doc.createElement('width') 
width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node = doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node =", "= image + 70 return np.array(image) # 保存mhd文件 def save_mhd_image(image,", "image = cv2.erode(img, kernel) return image # 图像膨胀 # def", "image_draw.point([xs[index], y], fill=(255, 0, 0)) if save_path is None: image.show()", "i += 1 return np.asarray(imgs, np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8),", "for phase_name in phasenames: mhd_image_path = glob(os.path.join(slice_dir, 'Image_%s*.mhd' % phase_name))[0]", "return img_arr, res_dict # 读取文件序列 def read_dicom_series(dir_name): reader = itk.ImageSeriesReader()", "phase_name + '_Mask*.mhd'))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image", "filename_node.appendChild(filename_txt_node) shape = list(np.shape(mhd_image)) size_node = doc.createElement('size') root_node.appendChild(size_node) width_node =", "mask_image_shape = list(np.shape(mask_image)) if len(mask_image_shape) == 3: mask_image = mask_image[1,", "image = image * 2.0 # zoom desired_size = [size_training,", "axes=[1, 2, 0]) mhd_image = mhd_images elif suffix_name == 'npy':", "= image_arr image_arr_rgb[:, :, 2] = image_arr image = Image.fromarray(np.asarray(image_arr_rgb,", "as f: f.writelines(lines) f.close() def static_pixel_num(image_dir, target_phase='PV'): # {0: 217784361,", "+ 1): # z = pre_end - j # if", "np.max(mhd_image)) cv2.imwrite(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml')", "2, 0]) mhd_image = mhd_images else: print('the suffix name does", "static_res = { 0: 0, 1: 0, 2: 0, 3:", "+ 1) >= channel: next_slice = np.expand_dims(volume[:, :, i], axis=0)", "= 0 kernel_top = np.copy(kernel_whole) kernel_top[half_size + 1:, :] =", "new_image[:, :, i] = image[i, :, :] return new_image def", "reader.SetFileNames(dicom_series) images = 
reader.Execute() image_array = itk.GetArrayFromImage(images) return image_array #", "np.uint8) max_v = 300. min_v = -350. mhd_image[mhd_image > max_v]", "458278, 5: 705482} # {0: 1.0, 156, 1041, 146, 475,", "pixel2type.keys(): mask[mask == key] = type2pixel[pixel2type[key]][0] pixel_value_set = np.unique(mask) print", "axes=[1, 2, 0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' + str(i)) binary_seg_slice =", "kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.dilate(img, kernel)", "image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0] + '_ROI.jpg')) del image, ROI_Image gc.collect() def", "axis=0) print('after expand23D', np.shape(mask_image)) return mask_image ''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 ''' def", "volume[volume > max_v] = max_v volume[volume < min_v] = min_v", "np.save(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml') if", "ys = np.where(labeled_mask == idx) min_xs = np.min(xs) max_xs =", "3, 4] def get_voxel_size(file_path): load_image_obj = nipy.load_image(file_path) header = load_image_obj.header", "def return_phasename(file_name): phasenames = ['NC', 'ART', 'PV'] for phasename in", "h, d] ''' def convert2depthlastest(image): image = np.array(image) shape =", "for key in pixel2type.keys(): mask[mask == key] = type2pixel[pixel2type[key]][0] pixel_value_set", "(3.0 * pos_slice_num) / total_slice_num # 正样本是负样本的 if neg_rate >", "+ 1): xs, ys = np.where(labeled_mask == idx) min_xs =", "np.array(image[:, :, 0].shape, dtype=np.float) print(zooms) after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]])", "axis=0), axis=0)>0, np.bool) num_lesion_slices = np.sum(has_lesion) print os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1]", "cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = 
cv2.erode(img, kernel) return image #", "%d %d %d\\n' % ('Cyst', min_y, min_x, max_y, max_x) print(line)", "# 填充图像 def fill_region(image): # image.show() from scipy import ndimage", "as itk import pydicom import numpy as np from PIL", "gc.collect() def compress22dim(image): ''' 将一个矩阵如果可能,压缩到三维的空间 ''' shape = list(np.shape(image)) if", "pixel_value_set = np.unique(mask) print pixel_value_set for value in list(pixel_value_set): static_res[value]", "2] = image_arr image = Image.fromarray(np.asarray(image_arr_rgb, np.uint8)) image_draw = ImageDraw.Draw(image)", "doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node = doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node = doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3)))", "- minval # print('static scaler 0', interv) # scale down", "> max_v] = max_v mhd_image[mhd_image < min_v] = min_v print(np.mean(mhd_image,", "+ '_Mask*.mhd'))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image =", "enumerate(ys): image_draw.point([xs[index], y], fill=(255, 0, 0)) if save_path is None:", ":return: ''' mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii')) for mask_nii_path in mask_nii_paths:", "for i in range(shape[2]): new_image[i, :, :] = image[:, :,", "= itk.ReadImage(file_path) image = np.array(itk.GetArrayFromImage(header)) if rejust: image[image < -70]", "max_v = 250. min_v = -200. 
# max_v = 180", "np.shape(image) new_image = np.zeros([shape[1], shape[2], shape[0]]) for i in range(shape[0]):", "doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node = doc.createElement('truncated') object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node =", "statics_num_slices_lesion(nii_dir): ''' 统计每个case,有多少slice具有病灶 :param nii_dir: :return: ''' mask_nii_paths = glob(os.path.join(nii_dir,", "seg_path: :return: ''' from skimage.measure import label volume, header =", "= cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.dilate(img, kernel) return image", "# numpy_clip c_minimum = -300. c_maximum = 500. s_maximum =", "nii_dir: :return: ''' mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii')) for mask_nii_path in", "= doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name = file_name + '.jpg' filename_node =", "} from convert2jpg import extract_bboxs_mask_from_mask from config import pixel2type, type2pixel", "np.copy(kernel_whole) kernel_top[half_size + 1:, :] = 0 kernel_bottom = np.copy(kernel_whole)", "'Mask_%s*.mhd' % phase_name))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image", "pos_slice_num = np.sum(np.sum(np.sum(seg == 2, axis=0), axis=0) != 0) total_slice_num", ":param kernel_size: :return: [5, kernel_size, kernel_size] ''' kernel_whole = np.ones([kernel_size,", "pixel_value_set for value in list(pixel_value_set): static_res[value] += np.sum(mask == value)", "kernel_size, kernel_size] ''' kernel_whole = np.ones([kernel_size, kernel_size], np.uint8) half_size =", "open(gt_save_path, 'w') as f: f.writelines(lines) f.close() def dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC',", "image.resize((size, size)) # def image_expand(mask_image, r): # return dilation(mask_image, disk(r))", 
"os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name) # image = read_nil(path) # print(np.shape(image)) # conver2JPG", "convert2jpg import extract_bboxs_mask_from_mask from config import pixel2type, type2pixel for sub_name", "xs, ys = np.where(mask_image == 1) xs_min = np.min(xs) xs_max", "# print np.shape(after_zeros) # test_convert2depthfirst() ''' 将[d, w, h]reshape为[w, h,", "to float minval = -350 interv = 500 - (-350)", "-350. mhd_image[mhd_image > max_v] = max_v mhd_image[mhd_image < min_v] =", "# test_convert2depthfirst() ''' 将[d, w, h]reshape为[w, h, d] ''' def", "read_dicom_series(dir_name): reader = itk.ImageSeriesReader() dicom_series = reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series) images =", "1] = 0 xs, ys = np.where(mask_image == 1) min_x", "mhd_image_path = glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0] mhd_mask_path = glob(os.path.join(slice_dir, phase_name + '_Mask*.mhd'))[0]", "4: 0, 5: 0 } from convert2jpg import extract_bboxs_mask_from_mask from", "header_obj = img_obj.header res_dict = {} res_dict['voxel_spacing'] = [header_obj['srow_x'][0], header_obj['srow_y'][1],", "import label volume, header = read_nii_with_header(volume_path) # volume = np.transpose(volume,", "np from PIL import Image, ImageDraw import gc from skimage.morphology", "print os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1] if __name__ == '__main__': # for", "np.array(itk.GetArrayFromImage(header)) if rejust: image[image < -70] = -70 image[image >", "''' 提取一幅图像中的ROI ''' xs, ys = np.where(mask_image == 1) xs_min", "= os.path.basename(save_dir) + '/' + phase_name folder_node = doc.createElement('folder') root_node.appendChild(folder_node)", "250. min_v = -200. 
# max_v = 180 # min_v", "np.array(image) shape = np.shape(image) new_image = np.zeros([shape[2], shape[0], shape[1]]) for", "= min_v print(np.mean(mhd_image, dtype=np.float32)) mhd_image -= np.mean(mhd_image) min_v = np.min(mhd_image)", "np.zeros([100, 100, 30]) # after_zeros = convert2depthfirst(zeros) # print np.shape(after_zeros)", "np.asarray(np.squeeze(mask_image), np.uint8) if phase_name == target_phase: target_mask = mask_image print(np.shape(mhd_images))", "# {0: 1.0, 156, 1041, 146, 475, 308} static_res =", "None def processing(image, size_training): image = np.array(image) # numpy_clip bottom", "xs, ys = np.where(labeled_mask == idx) min_xs = np.min(xs) max_xs", "dataset_name = os.path.basename(os.path.dirname(slice_dir)) save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.jpg') if", "with open(gt_save_path, 'w') as f: f.writelines(lines) f.close() def dicom2jpg_multiphase(slice_dir, save_dir,", "mhd_image * 255) xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml') if not", "w, h]reshape为[w, h, d] ''' def convert2depthlastest(image): image = np.array(image)", "width, height] :return: idx ''' sum_res = np.sum(np.sum(mask_image, axis=1), axis=1)", "== 1) print(xs, ys) min_x = np.min(xs) min_y = np.min(ys)", "rejust: image[image < -70] = -70 image[image > 180] =", "= read_dicom_series(dicom_series_dir) save_mhd_image(data, save_path) # 读取单个DICOM文件 def read_dicom_file(file_name): header =", "names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' + str(i)) binary_seg_slice = np.asarray(seg_slice == 2,", "f: f.writelines(lines) f.close() def dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC', 'ART', 'PV'], target_phase='PV',", "# for name in names: # path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name)", "res = {} res[0] = 'CYST' res[1] = 'FNH' res[2]", "'test']: names = os.listdir(os.path.join(image_dir, sub_name)) for name in 
names: cur_slice_dir", "itk.GetArrayFromImage(images) return image_array # 将DICOM序列转化成MHD文件 def convert_dicomseries2mhd(dicom_series_dir, save_path): data =", "image_arr = image_arr + 70 shape = list(np.shape(image_arr)) image_arr_rgb =", "mhd_image, mhd_image], axis=2) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) max_v = 300.", "= np.where(mask_image == 1) print(xs, ys) min_x = np.min(xs) min_y", "= np.zeros([shape[1], shape[2], shape[0]]) for i in range(shape[0]): new_image[:, :,", "- (-350) image -= minval # scale down to 0", "mask_image = np.expand_dims(mask_image, axis=0) print('after expand23D', np.shape(mask_image)) return mask_image '''", "read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_images.append(mhd_image) mask_image = np.asarray(np.squeeze(mask_image), np.uint8)", "image[image > 180] = 180 image = image + 70", "data = read_dicom_series(dicom_series_dir) save_mhd_image(data, save_path) # 读取单个DICOM文件 def read_dicom_file(file_name): header", ">= channel: # z = channel - 1 # next_slice.append(volume[:,", "180 image = image + 70 return np.array(image) # 保存mhd文件", "# z = 0 # pre_slice.append(volume[:, :, z]) if (i", "1 # next_slice.append(volume[:, :, z]) if (i + 1) >=", "axis=0) next_slice = [] # next_start = i + slice_num", "object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node = doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node = doc.createElement('bndbox')", "np.uint8)) for index, y in enumerate(ys): image_draw.point([xs[index], y], fill=(255, 0,", "0', interv) # scale down to 0 - 2 #", "+ 1 pos_slice_num = np.sum(np.sum(np.sum(seg == 2, axis=0), axis=0) !=", "= doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node = doc.createElement('ymax') 
ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with", "# if z < 0: # z = 0 #", "i in range(channel): seg_slice = seg[:, :, i] mid_slice =", "= [] names = [] masks = [] tumor_weakly_masks =", "''' 将一个矩阵如果可能,压缩到三维的空间 ''' shape = list(np.shape(image)) if len(shape) == 3:", "np.zeros([size_training, size_training, np.shape(image)[2]]) for i in range(np.shape(after_zoom)[2]): after_zoom[:, :, i]", "+= np.sum(mask == value) print(static_res) def convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0,", "np.float), axis=0) mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image", "image_arr image = Image.fromarray(np.asarray(image_arr_rgb, np.uint8)) image_draw = ImageDraw.Draw(image) [ys, xs]", "interv image = image * 2.0 # zoom desired_size =", "min_v = np.min(mhd_image) max_v = np.max(mhd_image) interv = max_v -", "name in names: # path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name) # image", "shape[0], shape[1]]) for i in range(shape[2]): new_image[i, :, :] =", "next_slice.append(volume[:, :, z]) if (i + 1) >= channel: next_slice", "= itk.ImageSeriesReader() dicom_series = reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series) images = reader.Execute() image_array", "读取单个DICOM文件 def read_dicom_file(file_name): header = pydicom.read_file(file_name) image = header.pixel_array image", "= np.copy(kernel_whole) kernel_bottom[:half_size, :] = 0 return np.concatenate([ np.expand_dims(kernel_whole, axis=0),", "1, minx - 1:maxx + 1, :] ROI_Image = Image.fromarray(np.asarray(ROI,", "= cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.erode(img, kernel) return image", "nii的路径 :param seg_path: :return: ''' from skimage.measure import label volume,", "target_phase='PV', suffix_name='npy'): target_mask = None mhd_images = [] for phase_name", "= list(np.shape(image)) if len(shape) == 
3: return np.squeeze(image) return image", "image * 2.0 # zoom desired_size = [size_training, size_training] desired_size", "0: 0, 1: 0, 2: 0, 3: 0, 4: 0,", "root_node.appendChild(folder_node) folder_txt_node = doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name = file_name + '.jpg'", "import cv2 from xml.dom.minidom import Document typenames = ['CYST', 'FNH',", "[] # pre_end = i - slice_num / 2 #", "np.zeros_like(binary_seg_slice, np.uint8) for idx in range(1, np.max(labeled_mask) + 1): xs,", "plt.axis('on') plt.title('image') plt.show() # 将图像画出来,并且画出标记的病灶 def save_image_with_mask(image_arr, mask_image, save_path): image_arr[image_arr", "width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node = doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node = doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node)", "r): # return dilation(mask_image, disk(r)) ''' 将形式如(512, 512)格式的图像转化为(1, 512, 512)形式的图片", "1: 1392043, 2: 209128, 3: 1486676, 4: 458278, 5: 705482}", "size_node = doc.createElement('size') root_node.appendChild(size_node) width_node = doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node =", "to 0 - 2 image /= (interv / 2) #", "ndimage image = ndimage.binary_fill_holes(image).astype(np.uint8) return image def close_operation(binary_image, kernel_size=5): kernel", "(i + 1) >= channel: next_slice = np.expand_dims(volume[:, :, i],", "kernel_top = np.copy(kernel_whole) kernel_top[half_size + 1:, :] = 0 kernel_bottom", "label volume, header = read_nii_with_header(volume_path) # volume = np.transpose(volume, [1,", "f.writelines(lines) f.close() def static_pixel_num(image_dir, target_phase='PV'): # {0: 217784361, 1: 1392043,", ":] return new_image def read_image_file(file_path): if file_path.endswith('.nii'): return read_nil(file_path) if", "for phasename in phasenames: if 
file_name.find(phasename) != -1: return phasename", "np kernel = np.ones((kernel_size, kernel_size), np.uint8) erosion = cv2.erode(img, kernel,", "doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node = doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node =", "min_v print(np.mean(mhd_image, dtype=np.float32)) mhd_image -= np.mean(mhd_image) min_v = np.min(mhd_image) max_v", "# z = channel - 1 # next_slice.append(volume[:, :, z])", "= np.shape(seg)[-1] print('pos_slice_num is ', pos_slice_num, total_slice_num) neg_rate = (3.0", "mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_image = np.expand_dims(mhd_image,", "# scale down to 0 - 2 # image /=", "np.min(mhd_image), np.max(mhd_image)) cv2.imwrite(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir, phase_name,", "print('the mask image shape is ', np.shape(mask_image)) if suffix_name ==", "min_v volume -= np.mean(volume) min_v = np.min(volume) max_v = np.max(volume)", "1) >= channel: next_slice = np.expand_dims(volume[:, :, i], axis=0) else:", "interv z_axis_case = header['voxel_spacing'][-1] slice_num = int(z_axis / z_axis_case) if", "# min_v = -70 volume[volume > max_v] = max_v volume[volume", "return res # 根据病灶类型的ID返回类型的字符串 def return_typename_byid(typeid): idname_dict = return_type_idname() return", "phase_name folder_node = doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node = doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name", "cv2.morphologyEx(slice_image, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))) return opening def get_kernel_filters(kernel_size): '''", "z < 0: # z = 0 # pre_slice.append(volume[:, :,", "== 2, axis=0), axis=0)>0, np.bool) num_lesion_slices = np.sum(has_lesion) print 
os.path.basename(mask_nii_path),", "= read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_image", "for j in range(1, slice_num + 1): # z =", "def statics_num_slices_lesion(nii_dir): ''' 统计每个case,有多少slice具有病灶 :param nii_dir: :return: ''' mask_nii_paths =", "1, ys_min: ys_max + 1] def resize_image(image, size): image =", "phase_name) # conver2JPG multi phase # image_dir = '/home/give/Documents/dataset/LiverLesionDetection_Splited/0' #", "np.ones([kernel_size, kernel_size], np.uint8) half_size = kernel_size // 2 kernel_left =", "mhd_mask_path = glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' % target_phase))[0] mask_image = read_mhd_image(mhd_mask_path) min_xs,", "axis=2) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) max_v = 300. min_v =", "seg = read_nii(seg_path) # print np.shape(volume), np.shape(seg) [_, _, channel]", "interv = max_v - min_v mhd_image = (mhd_image - min_v)", "scipy import ndimage image = ndimage.binary_fill_holes(image).astype(np.uint8) return image def close_operation(binary_image,", "conver2JPG single phase # image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' # save_dir =", "def read_dicom_series(dir_name): reader = itk.ImageSeriesReader() dicom_series = reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series) images", "in range(shape[2]): new_image[i, :, :] = image[:, :, i] return", "# z = pre_end - j # if z <", "= np.min(ys) ys_max = np.max(ys) return image[xs_min: xs_max + 1,", "= doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image != 1] =", "print('the shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) cv2.imwrite(save_path,", "phase_name, dataset_name+'_gt') if not os.path.exists(evulate_gt_dir): 
os.makedirs(evulate_gt_dir) xml_save_path = os.path.join(xml_save_dir, file_name", "version') return None def processing(image, size_training): image = np.array(image) #", "not os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir) xml_save_path = os.path.join(xml_save_dir, file_name + '.xml') gt_save_path", "def preprocessing_agumentation(image, size_training): image = np.array(image) # numpy_clip c_minimum =", "mhd_image[mhd_image > max_v] = max_v mhd_image[mhd_image < min_v] = min_v", "shape = np.shape(image) new_image = np.zeros([shape[2], shape[0], shape[1]]) for i", "c_minimum, c_maximum) interv = float(c_maximum - c_minimum) image = (image", "min_v = -200. # max_v = 180 # min_v =", "= np.sum(np.sum(mask_image, axis=1), axis=1) return np.argmax(sum_res) # 将一个矩阵保存为图片 def save_image(image_arr,", "提取一幅图像中的ROI ''' xs, ys = np.where(mask_image == 1) xs_min =", "np.array(image) # 保存mhd文件 def save_mhd_image(image, file_name): header = itk.GetImageFromArray(image) itk.WriteImage(header,", "= min_v volume -= np.mean(volume) min_v = np.min(volume) max_v =", "is ', pos_slice_num, total_slice_num) neg_rate = (3.0 * pos_slice_num) /", "from skimage.measure import label volume, header = read_nii_with_header(volume_path) # volume", "# MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name) # conver2JPG multi phase #", "for i in range(np.shape(after_zoom)[2]): after_zoom[:, :, i] = scipy.ndimage.zoom(image[:, :,", "import gc from skimage.morphology import disk, dilation import nipy import", "填充图像 def fill_region(image): # image.show() from scipy import ndimage image", "format of image is not support in this version') return", "import matplotlib.pyplot as plt plt.figure(\"Image\") # 这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img,", "os.path.join(xml_save_dir, file_name + '.xml') gt_save_path = os.path.join(evulate_gt_dir, file_name + '.txt')", "= os.path.basename(os.path.dirname(slice_dir)) save_path = 
os.path.join(save_dir, phase_name, dataset_name, file_name+'.jpg') if not", ":, i] mid_slice = np.expand_dims(volume[:, :, i], axis=0) pre_slice =", "extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir, 'tumor_types')) for key in pixel2type.keys(): mask[mask == key]", "0 kernel_top = np.copy(kernel_whole) kernel_top[half_size + 1:, :] = 0", "xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node = doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with open(xml_save_path, 'wb')", "itk.ReadImage(file_path) image = np.array(itk.GetArrayFromImage(header)) if rejust: image[image < -70] =", "pre_slice = np.expand_dims(volume[:, :, i-1], axis=0) next_slice = [] #", "{0: 217784361, 1: 1392043, 2: 209128, 3: 1486676, 4: 458278,", "if suffix_name == 'jpg': mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2,", ":return: ''' for sub_name in ['train', 'val', 'test']: names =", "2, 0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' + str(i)) binary_seg_slice = np.asarray(seg_slice", "= int(np.mean(y)) centroid_z = int(np.mean(z)) return centroid_x, centroid_y, centroid_z '''", "# pre_end = i - slice_num / 2 # pre_end", "#cv2.imwrite(save_path, mhd_image * 255) np.save(save_path, mhd_image * 255) xml_save_dir =", "np.array(image) # numpy_clip bottom = -300. top = 500. 
image", "axis=0), axis=0) != 0) total_slice_num = np.shape(seg)[-1] print('pos_slice_num is ',", "f: f.writelines(lines) f.close() def static_pixel_num(image_dir, target_phase='PV'): # {0: 217784361, 1:", "= glob(os.path.join(slice_dir, 'Image_%s*.mhd' % phase_name))[0] mhd_mask_path = glob(os.path.join(slice_dir, 'Mask_%s*.mhd' %", "as np kernel = np.ones((kernel_size, kernel_size), np.uint8) erosion = cv2.erode(img,", "1:maxx + 1, :] ROI_Image = Image.fromarray(np.asarray(ROI, np.uint8)) for index,", "c_maximum) interv = float(c_maximum - c_minimum) image = (image -", "cv2.imwrite(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml') if", ":half_size] = 0 kernel_top = np.copy(kernel_whole) kernel_top[half_size + 1:, :]", "folder_txt_node = doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name = file_name + '.jpg' filename_node", "def save_image_with_mask(image_arr, mask_image, save_path): image_arr[image_arr < -70] = -70 image_arr[image_arr", "def extract_ROI(image, mask_image): ''' 提取一幅图像中的ROI ''' xs, ys = np.where(mask_image", "1] = 0 xs, ys = np.where(mask_image == 1) print(xs,", "def image_erode(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image =", "# 保存mhd文件 def save_mhd_image(image, file_name): header = itk.GetImageFromArray(image) itk.WriteImage(header, file_name)", "in names: cur_slice_dir = os.path.join(image_dir, sub_name, name) execute_func(cur_slice_dir, *parameters) def", "axis=0), np.expand_dims(kernel_right, axis=0), np.expand_dims(kernel_top, axis=0), np.expand_dims(kernel_bottom, axis=0), ], axis=0) def", "= np.copy(kernel_whole) kernel_left[:, half_size + 1:] = 0 kernel_right =", "{0: 1.0, 156, 1041, 146, 475, 308} static_res = {", "h, d]reshape为[d, w, h] ''' def convert2depthfirst(image): image = np.array(image)", "np.asarray(np.squeeze(mhd_image), np.float32) mhd_image = np.expand_dims(mhd_image, axis=2) 
mhd_image = np.concatenate([mhd_image, mhd_image,", "image # 图像膨胀 # def image_expand(image, size): # def find_significant_layer(mask_image):", "image = Image.fromarray(np.asarray(image_arr_rgb, np.uint8)) image_draw = ImageDraw.Draw(image) [ys, xs] =", "i], axis=0) else: pre_slice = np.expand_dims(volume[:, :, i-1], axis=0) next_slice", "/= (interv / 2) # zoom desired_size = [size_training, size_training]", "centroid_x = int(np.mean(x)) centroid_y = int(np.mean(y)) centroid_z = int(np.mean(z)) return", "''' def expand23D(mask_image): shape = list(np.shape(mask_image)) if len(shape) == 2:", "kernel) return close_r def open_operation(slice_image, kernel_size=3): opening = cv2.morphologyEx(slice_image, cv2.MORPH_OPEN,", "volume -= np.mean(volume) min_v = np.min(volume) max_v = np.max(volume) interv", "i] return new_image # def test_convert2depthfirst(): # zeros = np.zeros([100,", "kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.erode(img, kernel) return", "'HCC', 'HEM', 'METS'] typeids = [0, 1, 2, 3, 4]", "save_mhd_image(image, file_name): header = itk.GetImageFromArray(image) itk.WriteImage(header, file_name) # 根据文件名返回期项名 def", "i-1], axis=0) next_slice = [] # next_start = i +", "phase_name+'_Image*.mhd'))[0] mhd_mask_path = glob(os.path.join(slice_dir, phase_name + '_Mask*.mhd'))[0] mhd_image = read_mhd_image(mhd_image_path)", "0) miny = np.min(ys) maxy = np.max(ys) minx = np.min(xs)", "= float(c_maximum - c_minimum) image = (image - c_minimum) /", "min_y, min_x, max_y, max_x) print(line) lines = [] lines.append(line) with", "import pydicom import numpy as np from PIL import Image,", "np.min(xs) min_y = np.min(ys) max_x = np.max(xs) max_y = np.max(ys)", "70 shape = list(np.shape(image_arr)) image_arr_rgb = np.zeros(shape=[shape[0], shape[1], 3]) image_arr_rgb[:,", "kernel_size], np.uint8) half_size = kernel_size // 2 kernel_left = np.copy(kernel_whole)", "name in names: cur_slice_dir = os.path.join(image_dir, sub_name, 
name) mhd_mask_path =", "= -350. mhd_image[mhd_image > max_v] = max_v mhd_image[mhd_image < min_v]", "ys = np.where(mask_image == 1) min_x = np.min(xs) min_y =", "# conver2JPG single phase # image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' # save_dir", "f.close() def dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC', 'ART', 'PV'], target_phase='PV', suffix_name='npy'): target_mask", "pre_end = i - slice_num / 2 # pre_end =", "names: cur_slice_dir = os.path.join(image_dir, sub_name, name) mhd_mask_path = glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd'", "= np.min(ys) maxy = np.max(ys) minx = np.min(xs) maxx =", "< min_v] = min_v volume -= np.mean(volume) min_v = np.min(volume)", "image = Image.fromarray(np.asarray(image_arr, np.uint8)) image.save(save_path) def show_image(image): img = np.asarray(image,", ":, i], axis=0) pre_slice = [] # pre_end = i", "dicom_series = reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series) images = reader.Execute() image_array = itk.GetArrayFromImage(images)", "= [header_obj['srow_x'][0], header_obj['srow_y'][1], header_obj['srow_z'][2]] img_arr = img_obj.get_data() return img_arr, res_dict", "target_phase='PV'): # {0: 217784361, 1: 1392043, 2: 209128, 3: 1486676,", "execute_func, *parameters): ''' 遍历MICCAI2018文件夹的框架 :param execute_func: :return: ''' for sub_name", "len(shape) == 2: mask_image = np.expand_dims(mask_image, axis=0) print('after expand23D', np.shape(mask_image))", "np.uint8) import matplotlib.pyplot as plt plt.figure(\"Image\") # 这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话)", "= np.max(xs) min_ys = np.min(ys) max_ys = np.max(ys) weakly_label_mask[min_xs: max_xs,", "mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32)", "for name in names: cur_slice_dir = os.path.join(image_dir, sub_name, name) execute_func(cur_slice_dir,", 
"cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size)) close_r = cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel) return close_r", "', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) cv2.imwrite(save_path, mhd_image * 255) xml_save_dir =", "', np.shape(mask_image)) if suffix_name == 'jpg': mhd_images = np.transpose(np.asarray(mhd_images, np.float32),", "!= -1: return phasename # 读取DICOM文件中包含的病例ID信息 def read_patientId(dicom_file_path): ds =", "mhd_mask_path = glob(os.path.join(slice_dir, phase_name + '_Mask*.mhd'))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image", "2: 209128, 3: 1486676, 4: 458278, 5: 705482} # {0:", "0, 4: 0, 5: 0 } from convert2jpg import extract_bboxs_mask_from_mask", "== 0: slice_num = 1 seg = read_nii(seg_path) # print", "mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) #cv2.imwrite(save_path, mhd_image * 255)", "image -= minval # scale down to 0 - 2", "ImageDraw import gc from skimage.morphology import disk, dilation import nipy", "500. s_maximum = 255. image = np.clip(image, c_minimum, c_maximum) interv", "masks.append(binary_seg_slice) labeled_mask = label(binary_seg_slice) weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8) for idx", "遍历MICCAI2018文件夹的框架 :param execute_func: :return: ''' for sub_name in ['train', 'val',", "2]) volume = np.asarray(volume, np.float32) max_v = 250. 
min_v =", "suffix_name == 'jpg': mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0])", "* 255) xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml') if not os.path.exists(xml_save_dir):", "int(z_axis / z_axis_case) if slice_num == 0: slice_num = 1", "1 res['HCC'] = 2 res['HEM'] = 3 res['METS'] = 4", "load_image_obj.header x_size = header['srow_x'][0] y_size = header['srow_y'][1] z_size = header['srow_z'][2]", "target_phase))[0] mask_image = read_mhd_image(mhd_mask_path) min_xs, min_ys, max_xs, max_ys, names, mask", "np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8), np.asarray( tumor_weakly_masks, np.uint8) def statics_num_slices_lesion(nii_dir): '''", "cur_slice_dir = os.path.join(image_dir, sub_name, name) mhd_mask_path = glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' %", "np.mean(next_slice, axis=0, keepdims=True) imgs.append( np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0), axes=[1, 2,", "def read_dicom_file(file_name): header = pydicom.read_file(file_name) image = header.pixel_array image =", "np.clip(image, c_minimum, c_maximum) interv = float(c_maximum - c_minimum) image =", "np.max(binary_seg_slice) masks.append(binary_seg_slice) labeled_mask = label(binary_seg_slice) weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8) for", "img_obj.header res_dict = {} res_dict['voxel_spacing'] = [header_obj['srow_x'][0], header_obj['srow_y'][1], header_obj['srow_z'][2]] img_arr", "= load_image_obj.header x_size = header['srow_x'][0] y_size = header['srow_y'][1] z_size =", "slice_num = 1 seg = read_nii(seg_path) # print np.shape(volume), np.shape(seg)", "file_name+'.jpg') if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image is", "'/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' # save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' # phase_name = 'NC' #", "header.RescaleSlope 
* image + header.RescaleIntercept return image # 读取mhd文件 def", "= image_arr image_arr_rgb[:, :, 1] = image_arr image_arr_rgb[:, :, 2]", "[] i = slice_num + 1 pos_slice_num = np.sum(np.sum(np.sum(seg ==", "dilation(mask_image, disk(r)) ''' 将形式如(512, 512)格式的图像转化为(1, 512, 512)形式的图片 ''' def expand23D(mask_image):", "''' 将[d, w, h]reshape为[w, h, d] ''' def convert2depthlastest(image): image", "res['CYST'] = 0 res['FNH'] = 1 res['HCC'] = 2 res['HEM']", "image = np.array(itk.GetArrayFromImage(header)) if rejust: image[image < -70] = -70", "pos_slice_num) / total_slice_num # 正样本是负样本的 if neg_rate > 1.0: neg_rate", "209128, 3: 1486676, 4: 458278, 5: 705482} # {0: 1.0,", "shape = np.shape(image) new_image = np.zeros([shape[1], shape[2], shape[0]]) for i", "max_x) print(line) lines = [] lines.append(line) with open(gt_save_path, 'w') as", "phase_name))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image),", "idx in range(1, np.max(labeled_mask) + 1): xs, ys = np.where(labeled_mask", "= image / interv image = image * 2.0 #", "image_arr + 70 shape = list(np.shape(image_arr)) image_arr_rgb = np.zeros(shape=[shape[0], shape[1],", "= [] liver_masks = [] i = slice_num + 1", "interv = max_v - min_v volume = (volume - min_v)", "np.concatenate([mhd_image, mhd_image, mhd_image], axis=2) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) max_v =", "i - slice_num / 2 # pre_end = i #", "from scipy import ndimage image = ndimage.binary_fill_holes(image).astype(np.uint8) return image def", "os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) phase_name = ''.join(phasenames) save_path = os.path.join(save_dir,", "for value in list(pixel_value_set): static_res[value] += np.sum(mask == value) print(static_res)", "= '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' # save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' # phase_name = 
'NC'", "save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' # phase_name = 'NC' # MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase,", "doc.appendChild(root_node) folder_name = os.path.basename(save_dir) + '/' + phase_name folder_node =", "= 1 => biliniear interpolation return after_zoom def preprocessing_agumentation(image, size_training):", "= doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node = doc.createElement('bndbox') object_node.appendChild(bndbox_node) xmin_node =", "save_path) # 读取单个DICOM文件 def read_dicom_file(file_name): header = pydicom.read_file(file_name) image =", "= header['srow_x'][0] y_size = header['srow_y'][1] z_size = header['srow_z'][2] return [x_size,", "nameid_dict[typename] # 填充图像 def fill_region(image): # image.show() from scipy import", "def open_operation(slice_image, kernel_size=3): opening = cv2.morphologyEx(slice_image, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)))", "= np.expand_dims(volume[:, :, i], axis=0) else: pre_slice = np.expand_dims(volume[:, :,", "height_node = doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node = doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node)", "next_start = i # for j in range(1, slice_num +", "% phase_name))[0] mhd_mask_path = glob(os.path.join(slice_dir, 'Mask_%s*.mhd' % phase_name))[0] mhd_image =", "= -200. 
# max_v = 180 # min_v = -70", "Image.fromarray(np.asarray(image, np.uint8)) return image.resize((size, size)) # def image_expand(mask_image, r): #", "xmin_node = doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node = doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node)", "*parameters): ''' 遍历MICCAI2018文件夹的框架 :param execute_func: :return: ''' for sub_name in", "0]) mhd_image = mhd_images else: print('the suffix name does not", "is ', np.shape(mask_image)) if suffix_name == 'jpg': mhd_images = np.transpose(np.asarray(mhd_images,", "for index, y in enumerate(ys): image_draw.point([xs[index], y], fill=(255, 0, 0))", "doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name = file_name + '.jpg' filename_node = doc.createElement('filename')", "= np.expand_dims(mhd_image, axis=2) mhd_image = np.concatenate([mhd_image, mhd_image, mhd_image], axis=2) mask_image", ":return: ''' from skimage.measure import label volume, header = read_nii_with_header(volume_path)", "+ 1:, :] = 0 kernel_bottom = np.copy(kernel_whole) kernel_bottom[:half_size, :]", "return image def close_operation(binary_image, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size))", "i], axis=0) pre_slice = [] # pre_end = i -", "0].shape, dtype=np.float) print(zooms) after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]]) for i", "[size_training, size_training] desired_size = np.asarray(desired_size, dtype=np.int) zooms = desired_size /", "np.max(ys) weakly_label_mask[min_xs: max_xs, min_ys: max_ys] = 1 liver_masks.append(np.asarray(seg_slice == 1,", "value是typeid def return_type_nameid(): res = {} res['CYST'] = 0 res['FNH']", "cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img, cmap='gray') plt.axis('on') plt.title('image') plt.show() # 将图像画出来,并且画出标记的病灶 def", "c_minimum) / 
interv * s_maximum minval = 0. maxval =", "in names: # path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name) # image =", "find_significant_layer(mask_image): ''' 找到显著层 :param mask_image: [depth, width, height] :return: idx", "z_axis_case) if slice_num == 0: slice_num = 1 seg =", "if file_path.endswith('.mhd'): return read_mhd_image(file_path) print('the format of image is not", "kernel_size=5): import cv2 import numpy as np kernel = np.ones((kernel_size,", "phase_name in phasenames: mhd_image_path = glob(os.path.join(slice_dir, 'Image_%s*.mhd' % phase_name))[0] mhd_mask_path", "# def image_expand(mask_image, r): # return dilation(mask_image, disk(r)) ''' 将形式如(512,", "file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) save_path = os.path.join(save_dir, phase_name,", "in phasenames: mhd_image_path = glob(os.path.join(slice_dir, 'Image_%s*.mhd' % phase_name))[0] mhd_mask_path =", "''' 找到显著层 :param mask_image: [depth, width, height] :return: idx '''", "root_node.appendChild(filename_node) filename_txt_node = doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape = list(np.shape(mhd_image)) size_node =", "in mask_nii_paths: mask_img = read_nii(mask_nii_path) has_lesion = np.asarray(np.sum(np.sum(mask_img == 2,", "max_x = np.max(xs) max_y = np.max(ys) object_node = doc.createElement('object') root_node.appendChild(object_node)", "= [] tumor_weakly_masks = [] liver_masks = [] i =", "Image.fromarray(np.asarray(ROI, np.uint8)) for index, y in enumerate(ys): image_draw.point([xs[index], y], fill=(255,", "itk.WriteImage(header, file_name) # 根据文件名返回期项名 def return_phasename(file_name): phasenames = ['NC', 'ART',", "phasename in ['NC', 'ART', 'PV']: # convert_dicomseries2mhd( # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' +", "print('static scaler 0', interv) # scale down to 0 -", "= 
np.max(xs) max_y = np.max(ys) object_node = doc.createElement('object') root_node.appendChild(object_node) name_node", "z = next_start + j # if z >= channel:", "np.expand_dims(volume[:, :, i], axis=0) else: pre_slice = np.expand_dims(volume[:, :, i-1],", "= header.RescaleSlope * image + header.RescaleIntercept return image # 读取mhd文件", "0, 2: 0, 3: 0, 4: 0, 5: 0 }", "30]) # after_zeros = convert2depthfirst(zeros) # print np.shape(after_zeros) # test_convert2depthfirst()", "= reader.Execute() image_array = itk.GetArrayFromImage(images) return image_array # 将DICOM序列转化成MHD文件 def", "4: 458278, 5: 705482} # {0: 1.0, 156, 1041, 146,", "new_image = np.zeros([shape[2], shape[0], shape[1]]) for i in range(shape[2]): new_image[i,", "min_v] = min_v print(np.mean(mhd_image, dtype=np.float32)) mhd_image -= np.mean(mhd_image) min_v =", "= np.expand_dims(volume[:, :, i-1], axis=0) next_slice = [] # next_start", "mask_image): ''' 提取一幅图像中的ROI ''' xs, ys = np.where(mask_image == 1)", "2: 0, 3: 0, 4: 0, 5: 0 } from", "np.transpose(volume, [1, 0, 2]) volume = np.asarray(volume, np.float32) max_v =", "j # if z >= channel: # z = channel", "np.expand_dims(kernel_right, axis=0), np.expand_dims(kernel_top, axis=0), np.expand_dims(kernel_bottom, axis=0), ], axis=0) def image_erode(img,", "c_minimum) image = (image - c_minimum) / interv * s_maximum", "half_size = kernel_size // 2 kernel_left = np.copy(kernel_whole) kernel_left[:, half_size", "skimage.measure import label volume, header = read_nii_with_header(volume_path) # volume =", "erosion def image_expand(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image", "image = ndimage.binary_fill_holes(image).astype(np.uint8) return image def close_operation(binary_image, kernel_size=5): kernel =", "np.max(ys) minx = np.min(xs) maxx = np.max(xs) ROI = image_arr_rgb[miny", "not support in this version') return None def processing(image, size_training):", "mhd_image = 
np.asarray(np.squeeze(mhd_image), np.float32) mhd_image = np.expand_dims(mhd_image, axis=2) mhd_image =", ":, i] = scipy.ndimage.zoom(image[:, :, i], zooms, order=1) # order", "np.max(labeled_mask) + 1): xs, ys = np.where(labeled_mask == idx) min_xs", "target_mask = mask_image print(np.shape(mhd_images)) mask_image = target_mask mask_image_shape = list(np.shape(mask_image))", "j # if z < 0: # z = 0", "180 image_arr = image_arr + 70 shape = list(np.shape(image_arr)) image_arr_rgb", "nipy.load_image(file_path) header = load_image_obj.header x_size = header['srow_x'][0] y_size = header['srow_y'][1]", "def return_typeid_byname(typename): nameid_dict = return_type_nameid() return nameid_dict[typename] # 填充图像 def", "header = itk.GetImageFromArray(image) itk.WriteImage(header, file_name) # 根据文件名返回期项名 def return_phasename(file_name): phasenames", "mask_image = np.asarray(np.squeeze(mask_image), np.uint8) if phase_name == target_phase: target_mask =", "to 0 - 2 # image /= (interv / 2)", ":] ROI_Image = Image.fromarray(np.asarray(ROI, np.uint8)) for index, y in enumerate(ys):", "= np.mean(next_slice, axis=0, keepdims=True) imgs.append( np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0), axes=[1,", "= list(np.shape(mask_image)) if len(shape) == 2: mask_image = np.expand_dims(mask_image, axis=0)", "doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape = list(np.shape(mhd_image)) size_node = doc.createElement('size') root_node.appendChild(size_node) width_node", "= target_mask mask_image_shape = list(np.shape(mask_image)) if len(mask_image_shape) == 3: mask_image", "= type2pixel[pixel2type[key]][0] pixel_value_set = np.unique(mask) print pixel_value_set for value in", "min_xs = np.min(xs) max_xs = np.max(xs) min_ys = np.min(ys) max_ys", "min_x = np.min(xs) min_y = np.min(ys) max_x = np.max(xs) max_y", "from PIL import Image, ImageDraw import gc from skimage.morphology import", "''' sum_res = np.sum(np.sum(mask_image, axis=1), 
axis=1) return np.argmax(sum_res) # 将一个矩阵保存为图片", "== 'npy': mhd_images = np.concatenate(np.asarray(mhd_images, np.float), axis=0) mhd_images = np.transpose(np.asarray(mhd_images,", "execute_func(cur_slice_dir, *parameters) def dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'): mhd_image_path = glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0]", "np.sum(np.sum(np.sum(seg == 2, axis=0), axis=0) != 0) total_slice_num = np.shape(seg)[-1]", "# convert_dicomseries2mhd( # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename, # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename", "= mask_image print(np.shape(mhd_images)) mask_image = target_mask mask_image_shape = list(np.shape(mask_image)) if", "+ '.mhd' # ) # names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') # for", "np.expand_dims(kernel_bottom, axis=0), ], axis=0) def image_erode(img, kernel_size=5): import cv2 import", "返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename def return_type_idname(): res = {} res[0] =", "return after_zoom def MICCAI2018_Iterator(image_dir, execute_func, *parameters): ''' 遍历MICCAI2018文件夹的框架 :param execute_func:", "minx - 1:maxx + 1, :] ROI_Image = Image.fromarray(np.asarray(ROI, np.uint8))", "nipy.load_image(file_path) header_obj = img_obj.header res_dict = {} res_dict['voxel_spacing'] = [header_obj['srow_x'][0],", "ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node = doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node = doc.createElement('ymax')", "+ 1] def resize_image(image, size): image = Image.fromarray(np.asarray(image, np.uint8)) return", "# 正样本是负样本的 if neg_rate > 1.0: neg_rate = 1.0 for", "image[xs_min: xs_max + 1, ys_min: ys_max + 1] def resize_image(image,", "'.jpg' 
filename_node = doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node = doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape", "= None mhd_images = [] for phase_name in phasenames: mhd_image_path", "os.path.basename(os.path.dirname(slice_dir)) phase_name = ''.join(phasenames) save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.'", "image_arr_rgb[:, :, 1] = image_arr image_arr_rgb[:, :, 2] = image_arr", "300. min_v = -350. mhd_image[mhd_image > max_v] = max_v mhd_image[mhd_image", "+ 1): # z = next_start + j # if", "< -70] = -70 image[image > 180] = 180 image", "/ 2) image = np.asarray(image, np.float32) image = image /", "import os from glob import glob import scipy import cv2", "def static_pixel_num(image_dir, target_phase='PV'): # {0: 217784361, 1: 1392043, 2: 209128,", "kernel) return image # 图像膨胀 # def image_expand(image, size): #", "== value) print(static_res) def convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0, short_edge=64): '''", "has_lesion = np.asarray(np.sum(np.sum(mask_img == 2, axis=0), axis=0)>0, np.bool) num_lesion_slices =", "= np.array(image) # numpy_clip c_minimum = -300. 
c_maximum = 500.", "= os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') # for name in names: # path =", "= label(binary_seg_slice) weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8) for idx in range(1,", "255) xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml') if not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir)", "+ '.jpg' filename_node = doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node = doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node)", "min_ys, max_xs, max_ys, names, mask = extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir, 'tumor_types')) for", "desired_size / np.array(image[:, :, 0].shape, dtype=np.float) print(zooms) after_zoom = np.zeros([size_training,", "doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image != 1] = 0", "= os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name) # image = read_nil(path) # print(np.shape(image)) #", "= np.zeros(shape=[shape[0], shape[1], 3]) image_arr_rgb[:, :, 0] = image_arr image_arr_rgb[:,", "= header.pixel_array image = header.RescaleSlope * image + header.RescaleIntercept return", "= np.asarray(image, np.uint8) import matplotlib.pyplot as plt plt.figure(\"Image\") # 这里必须加", "Image.fromarray(np.asarray(image_arr_rgb, np.uint8)) image_draw = ImageDraw.Draw(image) [ys, xs] = np.where(mask_image !=", "np.expand_dims(mask_image, axis=0) print('after expand23D', np.shape(mask_image)) return mask_image ''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 '''", "range(1, slice_num + 1): # z = pre_end - j", "!= 0) miny = np.min(ys) maxy = np.max(ys) minx =", "''' 统计每个case,有多少slice具有病灶 :param nii_dir: :return: ''' mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii'))", 
"file_name + '.txt') # for evulate doc = Document() root_node", "i # for j in range(1, slice_num + 1): #", "np.expand_dims(kernel_whole, axis=0), np.expand_dims(kernel_left, axis=0), np.expand_dims(kernel_right, axis=0), np.expand_dims(kernel_top, axis=0), np.expand_dims(kernel_bottom, axis=0),", "numpy_clip c_minimum = -300. c_maximum = 500. s_maximum = 255.", "return np.array(image) # 保存mhd文件 def save_mhd_image(image, file_name): header = itk.GetImageFromArray(image)", "= np.asarray(image, np.float32) image = image / interv image =", "np.min(ys) ys_max = np.max(ys) return image[xs_min: xs_max + 1, ys_min:", "interv) # scale down to 0 - 2 # image", "''.join(phasenames) save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.' + suffix_name) if", "'/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename + '.mhd' # ) # names =", "'jpg': mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image =", "img_arr = img_obj.get_data() return img_arr, res_dict # 读取文件序列 def read_dicom_series(dir_name):", "ys_min = np.min(ys) ys_max = np.max(ys) return image[xs_min: xs_max +", "shape = list(np.shape(mhd_image)) size_node = doc.createElement('size') root_node.appendChild(size_node) width_node = doc.createElement('width')", "os.path.basename(save_path).split('.')[0] + '_ROI.jpg')) del image, ROI_Image gc.collect() def compress22dim(image): '''", "plt.figure(\"Image\") # 这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img, cmap='gray') plt.axis('on') plt.title('image') plt.show()", "header = load_image_obj.header x_size = header['srow_x'][0] y_size = header['srow_y'][1] z_size", "def find_significant_layer(mask_image): ''' 找到显著层 :param mask_image: [depth, width, height] :return:", "= np.array(image) shape = np.shape(image) new_image = np.zeros([shape[2], shape[0], shape[1]])", "= np.concatenate([mhd_image, mhd_image, mhd_image], axis=2) mask_image = 
np.asarray(np.squeeze(mask_image), np.uint8) max_v", "< min_v] = min_v print(np.mean(mhd_image, dtype=np.float32)) mhd_image -= np.mean(mhd_image) min_v", "[_, _, channel] = np.shape(volume) imgs = [] names =", "%d %d %d %d\\n' % ('Cyst', min_y, min_x, max_y, max_x)", "after_zoom[:, :, i] = scipy.ndimage.zoom(image[:, :, i], zooms, order=1) #", "kernel_right = np.copy(kernel_whole) kernel_right[:, :half_size] = 0 kernel_top = np.copy(kernel_whole)", "is None: image.show() else: image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0] + '_ROI.jpg')) del", "load_image_obj = nipy.load_image(file_path) header = load_image_obj.header x_size = header['srow_x'][0] y_size", "axis=0), np.expand_dims(kernel_bottom, axis=0), ], axis=0) def image_erode(img, kernel_size=5): import cv2", "else: print('the suffix name does not support') assert False max_v", "+ j # if z >= channel: # z =", "1486676, 4: 458278, 5: 705482} # {0: 1.0, 156, 1041,", "np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8), np.asarray( tumor_weakly_masks, np.uint8) def statics_num_slices_lesion(nii_dir):", "shape = list(np.shape(image)) if len(shape) == 3: return np.squeeze(image) return", "= img_obj.get_data() return img_arr, res_dict # 读取文件序列 def read_dicom_series(dir_name): reader", "save_dir, phase_name) # conver2JPG multi phase # image_dir = '/home/give/Documents/dataset/LiverLesionDetection_Splited/0'", "def find_centroid3D(image, flag): [x, y, z] = np.where(image == flag)", "= np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image = mhd_images else:", "- slice_num / 2 # pre_end = i # for", "[] # next_start = i + slice_num / 2 #", "= header['srow_y'][1] z_size = header['srow_z'][2] return [x_size, y_size, z_size] def", "min_v) / interv z_axis_case = header['voxel_spacing'][-1] slice_num = int(z_axis /", "[depth, width, height] :return: idx ''' sum_res = np.sum(np.sum(mask_image, axis=1),", 
"return np.concatenate([ np.expand_dims(kernel_whole, axis=0), np.expand_dims(kernel_left, axis=0), np.expand_dims(kernel_right, axis=0), np.expand_dims(kernel_top, axis=0),", "in enumerate(ys): image_draw.point([xs[index], y], fill=(255, 0, 0)) if save_path is", "header['srow_y'][1] z_size = header['srow_z'][2] return [x_size, y_size, z_size] def read_nii(file_path):", "target_mask mask_image_shape = list(np.shape(mask_image)) if len(mask_image_shape) == 3: mask_image =", "axis=0) pre_slice = [] # pre_end = i - slice_num", "total_slice_num = np.shape(seg)[-1] print('pos_slice_num is ', pos_slice_num, total_slice_num) neg_rate =", "读取DICOM文件中包含的病例ID信息 def read_patientId(dicom_file_path): ds = pydicom.read_file(dicom_file_path) return ds.PatientID # 返回病灶类型和ID的字典类型的数据", "+ 1, minx - 1:maxx + 1, :] ROI_Image =", "''' def convert2depthlastest(image): image = np.array(image) shape = np.shape(image) new_image", "in names: cur_slice_dir = os.path.join(image_dir, sub_name, name) mhd_mask_path = glob(os.path.join(cur_slice_dir,", "// 2 kernel_left = np.copy(kernel_whole) kernel_left[:, half_size + 1:] =", "return image_array # 将DICOM序列转化成MHD文件 def convert_dicomseries2mhd(dicom_series_dir, save_path): data = read_dicom_series(dicom_series_dir)", "j in range(1, slice_num + 1): # z = pre_end", "= file_name + '.jpg' filename_node = doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node =", "=> biliniear interpolation return after_zoom def preprocessing_agumentation(image, size_training): image =", "= 0 kernel_bottom = np.copy(kernel_whole) kernel_bottom[:half_size, :] = 0 return", "top = 500. 
image = np.clip(image, bottom, top) # to", "for name in names: cur_slice_dir = os.path.join(image_dir, sub_name, name) mhd_mask_path", "label(binary_seg_slice) weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8) for idx in range(1, np.max(labeled_mask)", "read_mhd_image(file_path, rejust=False): header = itk.ReadImage(file_path) image = np.array(itk.GetArrayFromImage(header)) if rejust:", "size_training] desired_size = np.asarray(desired_size, dtype=np.int) zooms = desired_size / np.array(image[:,", "min_xs, min_ys, max_xs, max_ys, names, mask = extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir, 'tumor_types'))", "in range(1, slice_num + 1): # z = next_start +", "return nipy.load_image(file_path).get_data() def read_nii_with_header(file_path): img_obj = nipy.load_image(file_path) header_obj = img_obj.header", "phasenames = ['NC', 'ART', 'PV'] for phasename in phasenames: if", "support in this version') return None def processing(image, size_training): image", "after_zoom def MICCAI2018_Iterator(image_dir, execute_func, *parameters): ''' 遍历MICCAI2018文件夹的框架 :param execute_func: :return:", "bndbox_node.appendChild(xmax_node) ymax_node = doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with open(xml_save_path, 'wb') as", "maxval = 255. 
image -= minval interv = maxval -", "if file_path.endswith('.nii'): return read_nil(file_path) if file_path.endswith('.mhd'): return read_mhd_image(file_path) print('the format", "import nipy import os from glob import glob import scipy", ":, i], axis=0) else: next_slice = np.expand_dims(volume[:, :, i+1], axis=0)", "+ '-' + str(i)) binary_seg_slice = np.asarray(seg_slice == 2, np.uint8)", "# for evulate doc = Document() root_node = doc.createElement('annotation') doc.appendChild(root_node)", "numpy as np from PIL import Image, ImageDraw import gc", "open(xml_save_path, 'wb') as f: f.write(doc.toprettyxml(indent='\\t', encoding='utf-8')) line = '%s %d", "1: 0, 2: 0, 3: 0, 4: 0, 5: 0", "np.expand_dims(kernel_left, axis=0), np.expand_dims(kernel_right, axis=0), np.expand_dims(kernel_top, axis=0), np.expand_dims(kernel_bottom, axis=0), ], axis=0)", "volume[volume < min_v] = min_v volume -= np.mean(volume) min_v =", "'METS' return res # 根据病灶类型的ID返回类型的字符串 def return_typename_byid(typeid): idname_dict = return_type_idname()", "root_node.appendChild(object_node) name_node = doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node = doc.createElement('truncated') object_node.appendChild(truncated_node)", "size)) # def image_expand(mask_image, r): # return dilation(mask_image, disk(r)) '''", "name in names: cur_slice_dir = os.path.join(image_dir, sub_name, name) execute_func(cur_slice_dir, *parameters)", "+ 70 shape = list(np.shape(image_arr)) image_arr_rgb = np.zeros(shape=[shape[0], shape[1], 3])", "== '__main__': # for phasename in ['NC', 'ART', 'PV']: #", "< 0: # z = 0 # pre_slice.append(volume[:, :, z])", "(i - 1) < 0: pre_slice = np.expand_dims(volume[:, :, i],", "> 1.0: neg_rate = 1.0 for i in range(channel): seg_slice", "= doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node = doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape = 
list(np.shape(mhd_image))", "np.asarray(liver_masks, np.uint8), np.asarray( tumor_weakly_masks, np.uint8) def statics_num_slices_lesion(nii_dir): ''' 统计每个case,有多少slice具有病灶 :param", "volume = (volume - min_v) / interv z_axis_case = header['voxel_spacing'][-1]", "for i in range(shape[0]): new_image[:, :, i] = image[i, :,", "np.uint8), np.asarray(liver_masks, np.uint8), np.asarray( tumor_weakly_masks, np.uint8) def statics_num_slices_lesion(nii_dir): ''' 统计每个case,有多少slice具有病灶", "475, 308} static_res = { 0: 0, 1: 0, 2:", "image_expand(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.dilate(img,", "'.xml') gt_save_path = os.path.join(evulate_gt_dir, file_name + '.txt') # for evulate", "= channel - 1 # next_slice.append(volume[:, :, z]) if (i", "z]) if (i + 1) >= channel: next_slice = np.expand_dims(volume[:,", "import numpy as np kernel = np.ones((kernel_size, kernel_size), np.uint8) erosion", "read_nii(seg_path) # print np.shape(volume), np.shape(seg) [_, _, channel] = np.shape(volume)", "- 1:maxy + 1, minx - 1:maxx + 1, :]", "kernel_size))) return opening def get_kernel_filters(kernel_size): ''' 返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param kernel_size:", "res # 根据病灶类型的ID返回类型的字符串 def return_typename_byid(typeid): idname_dict = return_type_idname() return idname_dict[typeid]", "1, np.uint8)) tumor_weakly_masks.append(weakly_label_mask) # i += 1 return np.asarray(imgs, np.float32),", "2) # zoom desired_size = [size_training, size_training] desired_size = np.asarray(desired_size,", "kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size)) close_r = cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE,", "1.0, 156, 1041, 146, 475, 308} static_res = { 0:", "= np.min(xs) xs_max = np.max(xs) ys_min = np.min(ys) ys_max =", "header_obj['srow_z'][2]] img_arr = img_obj.get_data() return img_arr, res_dict # 读取文件序列 def", "''' for sub_name in ['train', 'val', 
'test']: names = os.listdir(os.path.join(image_dir,", "0]) mhd_image = mhd_images elif suffix_name == 'npy': mhd_images =", "minval = -350 interv = 500 - (-350) image -=", "shape[2], shape[0]]) for i in range(shape[0]): new_image[:, :, i] =", ":param mask_image: [depth, width, height] :return: idx ''' sum_res =", "mhd_image_path = glob(os.path.join(slice_dir, 'Image_%s*.mhd' % phase_name))[0] mhd_mask_path = glob(os.path.join(slice_dir, 'Mask_%s*.mhd'", "in range(1, slice_num + 1): # z = pre_end -", "if save_path is None: image.show() else: image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0] +", "= np.shape(image) new_image = np.zeros([shape[2], shape[0], shape[1]]) for i in", "找到显著层 :param mask_image: [depth, width, height] :return: idx ''' sum_res", "image / interv image = image * 2.0 # zoom", "i], axis=0) else: next_slice = np.expand_dims(volume[:, :, i+1], axis=0) #", "phase_name, dataset_name+'_xml') if not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir = os.path.join(save_dir, phase_name,", "volume = np.transpose(volume, [1, 0, 2]) volume = np.asarray(volume, np.float32)", "return image def image_erode(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))", "= np.asarray(desired_size, dtype=np.int) zooms = desired_size / np.array(image[:, :, 0].shape,", "desired_size = np.asarray(desired_size, dtype=np.int) zooms = desired_size / np.array(image[:, :,", "= -350 interv = 500 - (-350) image -= minval", "= 0 xs, ys = np.where(mask_image == 1) min_x =", "mask_image, save_path): image_arr[image_arr < -70] = -70 image_arr[image_arr > 180]", "close_operation(binary_image, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size)) close_r = cv2.morphologyEx(binary_image,", "= [] # pre_end = i - slice_num / 2", "z = pre_end - j # if z < 0:", "'HEM', 'METS'] typeids = [0, 1, 2, 3, 4] def", 
"phasename + '.mhd' # ) # names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') #", "'val', 'test']: names = os.listdir(os.path.join(image_dir, sub_name)) for name in names:", "ys_max + 1] def resize_image(image, size): image = Image.fromarray(np.asarray(image, np.uint8))", "= 500. image = np.clip(image, bottom, top) # to float", "= glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0] mhd_mask_path = glob(os.path.join(slice_dir, phase_name + '_Mask*.mhd'))[0] mhd_image", "biliniear interpolation return after_zoom def MICCAI2018_Iterator(image_dir, execute_func, *parameters): ''' 遍历MICCAI2018文件夹的框架", "i] mid_slice = np.expand_dims(volume[:, :, i], axis=0) pre_slice = []", "for name in names: # path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name) #", "# 将图像画出来,并且画出标记的病灶 def save_image_with_mask(image_arr, mask_image, save_path): image_arr[image_arr < -70] =", "np.min(mhd_image), np.max(mhd_image)) #cv2.imwrite(save_path, mhd_image * 255) np.save(save_path, mhd_image * 255)", "x_size = header['srow_x'][0] y_size = header['srow_y'][1] z_size = header['srow_z'][2] return", "plt.imshow(img, cmap='gray') plt.axis('on') plt.title('image') plt.show() # 将图像画出来,并且画出标记的病灶 def save_image_with_mask(image_arr, mask_image,", "print(static_res) def convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0, short_edge=64): ''' 将nii转化成PNG :param", "os.path.join(evulate_gt_dir, file_name + '.txt') # for evulate doc = Document()", "if len(mask_image_shape) == 3: mask_image = mask_image[1, :, :] print('the", "'Mask_%s*.mhd' % target_phase))[0] mask_image = read_mhd_image(mhd_mask_path) min_xs, min_ys, max_xs, max_ys,", "target_mask = None mhd_images = [] for phase_name in phasenames:", "return_type_idname() return idname_dict[typeid] # 根据病灶类型的name返回id的字符串 def return_typeid_byname(typename): nameid_dict = return_type_nameid()", "0: # z = 0 # 
pre_slice.append(volume[:, :, z]) if", "z_size] def read_nii(file_path): return nipy.load_image(file_path).get_data() def read_nii_with_header(file_path): img_obj = nipy.load_image(file_path)", "if slice_num == 0: slice_num = 1 seg = read_nii(seg_path)", "h]reshape为[w, h, d] ''' def convert2depthlastest(image): image = np.array(image) shape", "tumor_weakly_masks.append(weakly_label_mask) # i += 1 return np.asarray(imgs, np.float32), np.asarray(masks, np.uint8),", "total_slice_num # 正样本是负样本的 if neg_rate > 1.0: neg_rate = 1.0", "'FNH', 'HCC', 'HEM', 'METS'] typeids = [0, 1, 2, 3,", "as np from PIL import Image, ImageDraw import gc from", "axes=[1, 2, 0]) mhd_image = mhd_images else: print('the suffix name", "', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) #cv2.imwrite(save_path, mhd_image * 255) np.save(save_path, mhd_image", "doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with open(xml_save_path, 'wb') as f: f.write(doc.toprettyxml(indent='\\t', encoding='utf-8'))", "image def close_operation(binary_image, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size)) close_r", "size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image != 1] = 0 xs, ys =", "i+1], axis=0) # pre_slice = np.mean(pre_slice, axis=0, keepdims=True) # next_slice", "key in pixel2type.keys(): mask[mask == key] = type2pixel[pixel2type[key]][0] pixel_value_set =", "key是typeid value是typename def return_type_idname(): res = {} res[0] = 'CYST'", "res['HEM'] = 3 res['METS'] = 4 return res # 返回病灶类型ID和名称的字典类型的数据", "np.uint8) # print np.max(binary_seg_slice) masks.append(binary_seg_slice) labeled_mask = label(binary_seg_slice) weakly_label_mask =", "0)) if save_path is None: image.show() else: image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0]", "+ header.RescaleIntercept return image # 
读取mhd文件 def read_mhd_image(file_path, rejust=False): header", "w, h] ''' def convert2depthfirst(image): image = np.array(image) shape =", "= 0 return np.concatenate([ np.expand_dims(kernel_whole, axis=0), np.expand_dims(kernel_left, axis=0), np.expand_dims(kernel_right, axis=0),", "in pixel2type.keys(): mask[mask == key] = type2pixel[pixel2type[key]][0] pixel_value_set = np.unique(mask)", "pre_slice = np.mean(pre_slice, axis=0, keepdims=True) # next_slice = np.mean(next_slice, axis=0,", "max_v = 300. min_v = -350. mhd_image[mhd_image > max_v] =", "names = [] masks = [] tumor_weakly_masks = [] liver_masks", "phasenames: mhd_image_path = glob(os.path.join(slice_dir, 'Image_%s*.mhd' % phase_name))[0] mhd_mask_path = glob(os.path.join(slice_dir,", "# 将DICOM序列转化成MHD文件 def convert_dicomseries2mhd(dicom_series_dir, save_path): data = read_dicom_series(dicom_series_dir) save_mhd_image(data, save_path)", "'NC' # MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name) # conver2JPG multi phase", "cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))) return opening def get_kernel_filters(kernel_size): ''' 返回进行kernel操作的5个模版", "ymax_node = doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with open(xml_save_path, 'wb') as f:", "= mask_image[1, :, :] print('the mask image shape is ',", "doc.createElement('bndbox') object_node.appendChild(bndbox_node) xmin_node = doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node = doc.createElement('ymin')", "f.close() def static_pixel_num(image_dir, target_phase='PV'): # {0: 217784361, 1: 1392043, 2:", "cv2.MORPH_CLOSE, kernel) return close_r def open_operation(slice_image, kernel_size=3): opening = cv2.morphologyEx(slice_image,", "0. maxval = 255. 
image -= minval interv = maxval", "erosion = cv2.erode(img, kernel, iterations=1) return erosion def image_expand(img, kernel_size=5):", "1] = image_arr image_arr_rgb[:, :, 2] = image_arr image =", "0, 2]) volume = np.asarray(volume, np.float32) max_v = 250. min_v", "= 'NC' # MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name) # conver2JPG multi", "/= (interv / 2) image = np.asarray(image, np.float32) image =", "mhd_image -= np.mean(mhd_image) min_v = np.min(mhd_image) max_v = np.max(mhd_image) interv", "*parameters) def dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'): mhd_image_path = glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0] mhd_mask_path", "bottom = -300. top = 500. image = np.clip(image, bottom,", "as f: f.write(doc.toprettyxml(indent='\\t', encoding='utf-8')) line = '%s %d %d %d", "np.max(volume) interv = max_v - min_v volume = (volume -", ":, 0] = image_arr image_arr_rgb[:, :, 1] = image_arr image_arr_rgb[:,", "this version') return None def processing(image, size_training): image = np.array(image)", "# after_zeros = convert2depthfirst(zeros) # print np.shape(after_zeros) # test_convert2depthfirst() '''", "channel: next_slice = np.expand_dims(volume[:, :, i], axis=0) else: next_slice =", "将[w, h, d]reshape为[d, w, h] ''' def convert2depthfirst(image): image =", "1 liver_masks.append(np.asarray(seg_slice == 1, np.uint8)) tumor_weakly_masks.append(weakly_label_mask) # i += 1", "'tumor_types')) for key in pixel2type.keys(): mask[mask == key] = type2pixel[pixel2type[key]][0]", "np.max(xs) ys_min = np.min(ys) ys_max = np.max(ys) return image[xs_min: xs_max", "= 0 # pre_slice.append(volume[:, :, z]) if (i - 1)", "read_nii(mask_nii_path) has_lesion = np.asarray(np.sum(np.sum(mask_img == 2, axis=0), axis=0)>0, np.bool) num_lesion_slices", ":param execute_func: :return: ''' for sub_name in ['train', 'val', 'test']:", "maxy = np.max(ys) minx = np.min(xs) maxx = np.max(xs) ROI", "''' 返回进行kernel操作的5个模版 
(1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param kernel_size: :return: [5, kernel_size, kernel_size] '''", "import Image, ImageDraw import gc from skimage.morphology import disk, dilation", "= os.path.join(evulate_gt_dir, file_name + '.txt') # for evulate doc =", "doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node = doc.createElement('bndbox') object_node.appendChild(bndbox_node) xmin_node = doc.createElement('xmin')", "image = np.clip(image, c_minimum, c_maximum) interv = float(c_maximum - c_minimum)", "0 kernel_right = np.copy(kernel_whole) kernel_right[:, :half_size] = 0 kernel_top =", "ds = pydicom.read_file(dicom_file_path) return ds.PatientID # 返回病灶类型和ID的字典类型的数据 key是typename value是typeid def", "308} static_res = { 0: 0, 1: 0, 2: 0,", "save_dir=None, z_axis=5.0, short_edge=64): ''' 将nii转化成PNG :param volume_path: nii的路径 :param seg_path:", "object_node.appendChild(name_node) truncated_node = doc.createElement('truncated') object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node = doc.createElement('difficult') object_node.appendChild(difficult_node)", "(image - c_minimum) / interv * s_maximum minval = 0.", "= Image.fromarray(np.asarray(image, np.uint8)) return image.resize((size, size)) # def image_expand(mask_image, r):", "not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt') if not", "= (3.0 * pos_slice_num) / total_slice_num # 正样本是负样本的 if neg_rate", "print pixel_value_set for value in list(pixel_value_set): static_res[value] += np.sum(mask ==", "suffix_name) if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image is", "= np.where(mask_image == 1) xs_min = np.min(xs) xs_max = np.max(xs)", "根据病灶类型的name返回id的字符串 def return_typeid_byname(typename): nameid_dict = return_type_nameid() 
return nameid_dict[typename] # 填充图像", "kernel) return image def image_erode(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size,", "image def image_erode(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image", "[header_obj['srow_x'][0], header_obj['srow_y'][1], header_obj['srow_z'][2]] img_arr = img_obj.get_data() return img_arr, res_dict #", "truncated_node.appendChild(doc.createTextNode('0')) difficult_node = doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node = doc.createElement('bndbox') object_node.appendChild(bndbox_node)", "= np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image = mhd_images elif", "header.pixel_array image = header.RescaleSlope * image + header.RescaleIntercept return image", "= doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node = doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name = file_name", "if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image is ',", "next_slice = np.mean(next_slice, axis=0, keepdims=True) imgs.append( np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0),", "new_image def read_image_file(file_path): if file_path.endswith('.nii'): return read_nil(file_path) if file_path.endswith('.mhd'): return", "image.show() else: image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0] + '_ROI.jpg')) del image, ROI_Image", "file_name = file_name + '.jpg' filename_node = doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node", "min_y = np.min(ys) max_x = np.max(xs) max_y = np.max(ys) object_node", "def read_image_file(file_path): if file_path.endswith('.nii'): return read_nil(file_path) if 
file_path.endswith('.mhd'): return read_mhd_image(file_path)", "-70 volume[volume > max_v] = max_v volume[volume < min_v] =", "mask_image[1, :, :] print('the mask image shape is ', np.shape(mask_image))", "+ 1:] = 0 kernel_right = np.copy(kernel_whole) kernel_right[:, :half_size] =", "['NC', 'ART', 'PV']: # convert_dicomseries2mhd( # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename, #", ") # names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') # for name in names:", "save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.' + suffix_name) if not", "= doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with open(xml_save_path, 'wb') as f: f.write(doc.toprettyxml(indent='\\t',", "= [0, 1, 2, 3, 4] def get_voxel_size(file_path): load_image_obj =", ":, 0].shape, dtype=np.float) print(zooms) after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]]) for", "phasenames=['NC', 'ART', 'PV'], target_phase='PV', suffix_name='npy'): target_mask = None mhd_images =", "range(shape[0]): new_image[:, :, i] = image[i, :, :] return new_image", "= read_nii(mask_nii_path) has_lesion = np.asarray(np.sum(np.sum(mask_img == 2, axis=0), axis=0)>0, np.bool)", "save_path is None: image.show() else: image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0] + '_ROI.jpg'))", ":, i-1], axis=0) next_slice = [] # next_start = i", "= doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node = doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node = doc.createElement('depth')", "将图像画出来,并且画出标记的病灶 def save_image_with_mask(image_arr, mask_image, save_path): image_arr[image_arr < -70] = -70", "seg_path, save_dir=None, z_axis=5.0, short_edge=64): ''' 
将nii转化成PNG :param volume_path: nii的路径 :param", "2 res['HEM'] = 3 res['METS'] = 4 return res #", "将一个矩阵保存为图片 def save_image(image_arr, save_path): image = Image.fromarray(np.asarray(image_arr, np.uint8)) image.save(save_path) def", "> 180] = 180 image_arr = image_arr + 70 shape", "+ '.xml') gt_save_path = os.path.join(evulate_gt_dir, file_name + '.txt') # for", "neg_rate = (3.0 * pos_slice_num) / total_slice_num # 正样本是负样本的 if", "axis=0)>0, np.bool) num_lesion_slices = np.sum(has_lesion) print os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1] if", "'w') as f: f.writelines(lines) f.close() def static_pixel_num(image_dir, target_phase='PV'): # {0:", "np.max(ys) object_node = doc.createElement('object') root_node.appendChild(object_node) name_node = doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node)", "4 return res # 返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename def return_type_idname(): res", "= np.transpose(volume, [1, 0, 2]) volume = np.asarray(volume, np.float32) max_v", "= ndimage.binary_fill_holes(image).astype(np.uint8) return image def close_operation(binary_image, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_RECT,", "- 2 image /= (interv / 2) # zoom desired_size", "np.expand_dims(kernel_top, axis=0), np.expand_dims(kernel_bottom, axis=0), ], axis=0) def image_erode(img, kernel_size=5): import", "axis=0), np.expand_dims(kernel_left, axis=0), np.expand_dims(kernel_right, axis=0), np.expand_dims(kernel_top, axis=0), np.expand_dims(kernel_bottom, axis=0), ],", "cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.dilate(img, kernel) return image def", "min_v = -350. 
mhd_image[mhd_image > max_v] = max_v mhd_image[mhd_image <", "= np.min(ys) max_ys = np.max(ys) weakly_label_mask[min_xs: max_xs, min_ys: max_ys] =", "doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node = doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image", "print('the format of image is not support in this version')", "import pixel2type, type2pixel for sub_name in ['train', 'val', 'test']: names", "mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_images.append(mhd_image) mask_image =", "= cv2.erode(img, kernel) return image # 图像膨胀 # def image_expand(image,", "1041, 146, 475, 308} static_res = { 0: 0, 1:", "= glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' % target_phase))[0] mask_image = read_mhd_image(mhd_mask_path) min_xs, min_ys,", "next_slice], axis=0), axes=[1, 2, 0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' + str(i))", "key是typename value是typeid def return_type_nameid(): res = {} res['CYST'] = 0", "np.uint8) if phase_name == target_phase: target_mask = mask_image print(np.shape(mhd_images)) mask_image", "= [] # next_start = i + slice_num / 2", "close_r = cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel) return close_r def open_operation(slice_image, kernel_size=3):", "np.concatenate([ np.expand_dims(kernel_whole, axis=0), np.expand_dims(kernel_left, axis=0), np.expand_dims(kernel_right, axis=0), np.expand_dims(kernel_top, axis=0), np.expand_dims(kernel_bottom,", "print np.shape(after_zeros) # test_convert2depthfirst() ''' 将[d, w, h]reshape为[w, h, d]", "with open(xml_save_path, 'wb') as f: f.write(doc.toprettyxml(indent='\\t', encoding='utf-8')) line = '%s", "= Image.fromarray(np.asarray(image_arr_rgb, np.uint8)) image_draw = ImageDraw.Draw(image) [ys, xs] = 
np.where(mask_image", "# phase_name = 'NC' # MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name) #", "= read_nii(seg_path) # print np.shape(volume), np.shape(seg) [_, _, channel] =", "0 - 2 image /= (interv / 2) # zoom", "np.max(ys) return image[xs_min: xs_max + 1, ys_min: ys_max + 1]", "img_obj.get_data() return img_arr, res_dict # 读取文件序列 def read_dicom_series(dir_name): reader =", "image_arr image_arr_rgb[:, :, 2] = image_arr image = Image.fromarray(np.asarray(image_arr_rgb, np.uint8))", "return_type_idname(): res = {} res[0] = 'CYST' res[1] = 'FNH'", "= doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape = list(np.shape(mhd_image)) size_node = doc.createElement('size') root_node.appendChild(size_node)", "max_xs, min_ys: max_ys] = 1 liver_masks.append(np.asarray(seg_slice == 1, np.uint8)) tumor_weakly_masks.append(weakly_label_mask)", "np.ones((kernel_size, kernel_size), np.uint8) erosion = cv2.erode(img, kernel, iterations=1) return erosion", "= doc.createElement('annotation') doc.appendChild(root_node) folder_name = os.path.basename(save_dir) + '/' + phase_name", "size_training, np.shape(image)[2]]) for i in range(np.shape(after_zoom)[2]): after_zoom[:, :, i] =", "int(np.mean(y)) centroid_z = int(np.mean(z)) return centroid_x, centroid_y, centroid_z ''' 将[w,", "156, 1041, 146, 475, 308} static_res = { 0: 0,", "reader = itk.ImageSeriesReader() dicom_series = reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series) images = reader.Execute()", "np.shape(after_zeros) # test_convert2depthfirst() ''' 将[d, w, h]reshape为[w, h, d] '''", "res_dict['voxel_spacing'] = [header_obj['srow_x'][0], header_obj['srow_y'][1], header_obj['srow_z'][2]] img_arr = img_obj.get_data() return img_arr,", "image_arr_rgb[:, :, 0] = image_arr image_arr_rgb[:, :, 1] = image_arr", ":] = image[:, :, i] return new_image # def test_convert2depthfirst():", "of image is not support in this version') return None", "= 
np.expand_dims(mask_image, axis=0) print('after expand23D', np.shape(mask_image)) return mask_image ''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果", "根据病灶类型的ID返回类型的字符串 def return_typename_byid(typeid): idname_dict = return_type_idname() return idname_dict[typeid] # 根据病灶类型的name返回id的字符串", "# print('static scaler 0', interv) # scale down to 0", "ys) min_x = np.min(xs) min_y = np.min(ys) max_x = np.max(xs)", "min_v mhd_image = (mhd_image - min_v) / interv file_name =", "max_xs = np.max(xs) min_ys = np.min(ys) max_ys = np.max(ys) weakly_label_mask[min_xs:", "mhd_image = np.concatenate([mhd_image, mhd_image, mhd_image], axis=2) mask_image = np.asarray(np.squeeze(mask_image), np.uint8)", "dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC', 'ART', 'PV'], target_phase='PV', suffix_name='npy'): target_mask = None", "static_pixel_num(image_dir, target_phase='PV'): # {0: 217784361, 1: 1392043, 2: 209128, 3:", "(kernel_size, kernel_size)) image = cv2.erode(img, kernel) return image # 图像膨胀", "print(zooms) after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]]) for i in range(np.shape(after_zoom)[2]):", "phase # image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' # save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' #", "['NC', 'ART', 'PV'] for phasename in phasenames: if file_name.find(phasename) !=", "'PV'] for phasename in phasenames: if file_name.find(phasename) != -1: return", "= 1 seg = read_nii(seg_path) # print np.shape(volume), np.shape(seg) [_,", "cv2.dilate(img, kernel) return image def image_erode(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,", "as plt plt.figure(\"Image\") # 这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img, cmap='gray') plt.axis('on')", "dtype=np.float) print(zooms) after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]]) for i in", "axis=0, keepdims=True) # next_slice = np.mean(next_slice, axis=0, keepdims=True) 
imgs.append( np.transpose(np.concatenate([pre_slice,", "# conver2JPG multi phase # image_dir = '/home/give/Documents/dataset/LiverLesionDetection_Splited/0' # static_pixel_num(image_dir,", "z = 0 # pre_slice.append(volume[:, :, z]) if (i -", "if (i - 1) < 0: pre_slice = np.expand_dims(volume[:, :,", "dataset_name+'_xml') if not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt')", "''' 遍历MICCAI2018文件夹的框架 :param execute_func: :return: ''' for sub_name in ['train',", "3: 1486676, 4: 458278, 5: 705482} # {0: 1.0, 156,", "/ total_slice_num # 正样本是负样本的 if neg_rate > 1.0: neg_rate =", "if file_name.find(phasename) != -1: return phasename # 读取DICOM文件中包含的病例ID信息 def read_patientId(dicom_file_path):", "if not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt') if", "pre_slice.append(volume[:, :, z]) if (i - 1) < 0: pre_slice", "image.save(save_path) def show_image(image): img = np.asarray(image, np.uint8) import matplotlib.pyplot as", "channel] = np.shape(volume) imgs = [] names = [] masks", "def convert2depthfirst(image): image = np.array(image) shape = np.shape(image) new_image =", "[0, 1, 2, 3, 4] def get_voxel_size(file_path): load_image_obj = nipy.load_image(file_path)", "= 'CYST' res[1] = 'FNH' res[2] = 'HCC' res[3] =", "# if z >= channel: # z = channel -", "np.array(image) shape = np.shape(image) new_image = np.zeros([shape[1], shape[2], shape[0]]) for", "s_maximum = 255. 
image = np.clip(image, c_minimum, c_maximum) interv =", "cur_slice_dir = os.path.join(image_dir, sub_name, name) execute_func(cur_slice_dir, *parameters) def dicom2jpg_singlephase(slice_dir, save_dir,", "read_patientId(dicom_file_path): ds = pydicom.read_file(dicom_file_path) return ds.PatientID # 返回病灶类型和ID的字典类型的数据 key是typename value是typeid", "255) np.save(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml')", "d] ''' def convert2depthlastest(image): image = np.array(image) shape = np.shape(image)", "= doc.createElement('object') root_node.appendChild(object_node) name_node = doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node =", "res[4] = 'METS' return res # 根据病灶类型的ID返回类型的字符串 def return_typename_byid(typeid): idname_dict", "np.argmax(sum_res) # 将一个矩阵保存为图片 def save_image(image_arr, save_path): image = Image.fromarray(np.asarray(image_arr, np.uint8))", "dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'): mhd_image_path = glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0] mhd_mask_path = glob(os.path.join(slice_dir,", "z_axis_case = header['voxel_spacing'][-1] slice_num = int(z_axis / z_axis_case) if slice_num", "res # 返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename def return_type_idname(): res = {}", "mhd_mask_path = glob(os.path.join(slice_dir, 'Mask_%s*.mhd' % phase_name))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image", "os.makedirs(evulate_gt_dir) xml_save_path = os.path.join(xml_save_dir, file_name + '.xml') gt_save_path = os.path.join(evulate_gt_dir,", "names, mask = extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir, 'tumor_types')) for key in pixel2type.keys():", "(interv / 2) image = np.asarray(image, np.float32) image = image", "= np.max(xs) ROI = image_arr_rgb[miny - 1:maxy + 1, minx", "(1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param kernel_size: :return: [5, kernel_size, kernel_size] ''' 
kernel_whole =", "512, 512)形式的图片 ''' def expand23D(mask_image): shape = list(np.shape(mask_image)) if len(shape)", "list(np.shape(mask_image)) if len(shape) == 2: mask_image = np.expand_dims(mask_image, axis=0) print('after", "= np.copy(kernel_whole) kernel_top[half_size + 1:, :] = 0 kernel_bottom =", "xs_max + 1, ys_min: ys_max + 1] def resize_image(image, size):", "= scipy.ndimage.zoom(image[:, :, i], zooms, order=1) # order = 1", "doc.createElement('object') root_node.appendChild(object_node) name_node = doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node = doc.createElement('truncated')", "def convert_dicomseries2mhd(dicom_series_dir, save_path): data = read_dicom_series(dicom_series_dir) save_mhd_image(data, save_path) # 读取单个DICOM文件", "image = image + 70 return np.array(image) # 保存mhd文件 def", "image = np.array(image) shape = np.shape(image) new_image = np.zeros([shape[1], shape[2],", "extract_ROI(image, mask_image): ''' 提取一幅图像中的ROI ''' xs, ys = np.where(mask_image ==", "range(shape[2]): new_image[i, :, :] = image[:, :, i] return new_image", "slice_num / 2 # next_start = i # for j", "= img_obj.header res_dict = {} res_dict['voxel_spacing'] = [header_obj['srow_x'][0], header_obj['srow_y'][1], header_obj['srow_z'][2]]", "= image[:, :, i] return new_image # def test_convert2depthfirst(): #", "pydicom.read_file(file_name) image = header.pixel_array image = header.RescaleSlope * image +", "read_nii_with_header(file_path): img_obj = nipy.load_image(file_path) header_obj = img_obj.header res_dict = {}", "cmap='gray') plt.axis('on') plt.title('image') plt.show() # 将图像画出来,并且画出标记的病灶 def save_image_with_mask(image_arr, mask_image, save_path):", "# print np.shape(volume), np.shape(seg) [_, _, channel] = np.shape(volume) imgs", "# image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' # save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' # phase_name", "= 
-70 volume[volume > max_v] = max_v volume[volume < min_v]", "image[image < -70] = -70 image[image > 180] = 180", "+ phase_name folder_node = doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node = doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node)", "value是typename def return_type_idname(): res = {} res[0] = 'CYST' res[1]", "np.bool) num_lesion_slices = np.sum(has_lesion) print os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1] if __name__", "根据文件名返回期项名 def return_phasename(file_name): phasenames = ['NC', 'ART', 'PV'] for phasename", ":, 1] = image_arr image_arr_rgb[:, :, 2] = image_arr image", "np.float32) mhd_image = np.expand_dims(mhd_image, axis=2) mhd_image = np.concatenate([mhd_image, mhd_image, mhd_image],", "header['voxel_spacing'][-1] slice_num = int(z_axis / z_axis_case) if slice_num == 0:", "height] :return: idx ''' sum_res = np.sum(np.sum(mask_image, axis=1), axis=1) return", "-= minval # scale down to 0 - 2 image", "Image, ImageDraw import gc from skimage.morphology import disk, dilation import", "shape[0]]) for i in range(shape[0]): new_image[:, :, i] = image[i,", "/ interv image = image * 2.0 # zoom desired_size", "image_expand(mask_image, r): # return dilation(mask_image, disk(r)) ''' 将形式如(512, 512)格式的图像转化为(1, 512,", "''' mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii')) for mask_nii_path in mask_nii_paths: mask_img", "pre_slice = np.expand_dims(volume[:, :, i], axis=0) else: pre_slice = np.expand_dims(volume[:,", "interv = 500 - (-350) image -= minval # scale", "opening def get_kernel_filters(kernel_size): ''' 返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param kernel_size: :return: [5,", "np.where(mask_image == 1) xs_min = np.min(xs) xs_max = np.max(xs) ys_min", "('Cyst', min_y, min_x, max_y, max_x) print(line) lines = [] lines.append(line)", "[] liver_masks = [] i = slice_num + 1 pos_slice_num", "return np.argmax(sum_res) # 将一个矩阵保存为图片 def 
save_image(image_arr, save_path): image = Image.fromarray(np.asarray(image_arr,", "mhd_images else: print('the suffix name does not support') assert False", "ys_max = np.max(ys) return image[xs_min: xs_max + 1, ys_min: ys_max", "0 kernel_bottom = np.copy(kernel_whole) kernel_bottom[:half_size, :] = 0 return np.concatenate([", "xs_max = np.max(xs) ys_min = np.min(ys) ys_max = np.max(ys) return", "save_image_with_mask(image_arr, mask_image, save_path): image_arr[image_arr < -70] = -70 image_arr[image_arr >", "binary_seg_slice = np.asarray(seg_slice == 2, np.uint8) # print np.max(binary_seg_slice) masks.append(binary_seg_slice)", ":param nii_dir: :return: ''' mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii')) for mask_nii_path", "'HCC' res[3] = 'HEM' res[4] = 'METS' return res #", "return_typeid_byname(typename): nameid_dict = return_type_nameid() return nameid_dict[typename] # 填充图像 def fill_region(image):", "min_ys: max_ys] = 1 liver_masks.append(np.asarray(seg_slice == 1, np.uint8)) tumor_weakly_masks.append(weakly_label_mask) #", "print('pos_slice_num is ', pos_slice_num, total_slice_num) neg_rate = (3.0 * pos_slice_num)", "= ['CYST', 'FNH', 'HCC', 'HEM', 'METS'] typeids = [0, 1,", "minval # print('static scaler 0', interv) # scale down to", "= np.array(itk.GetArrayFromImage(header)) if rejust: image[image < -70] = -70 image[image", "np.where(labeled_mask == idx) min_xs = np.min(xs) max_xs = np.max(xs) min_ys", "{} res['CYST'] = 0 res['FNH'] = 1 res['HCC'] = 2", "print np.shape(volume), np.shape(seg) [_, _, channel] = np.shape(volume) imgs =", "np.expand_dims(volume[:, :, i-1], axis=0) next_slice = [] # next_start =", "assert False max_v = 300. min_v = -350. 
mhd_image[mhd_image >", "axis=0) else: pre_slice = np.expand_dims(volume[:, :, i-1], axis=0) next_slice =", "folder_name = os.path.basename(save_dir) + '/' + phase_name folder_node = doc.createElement('folder')", "0] = image_arr image_arr_rgb[:, :, 1] = image_arr image_arr_rgb[:, :,", "file_name + '.jpg' filename_node = doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node = doc.createTextNode(file_name)", "# scale down to 0 - 2 image /= (interv", "np.uint8) erosion = cv2.erode(img, kernel, iterations=1) return erosion def image_expand(img,", "np.zeros(shape=[shape[0], shape[1], 3]) image_arr_rgb[:, :, 0] = image_arr image_arr_rgb[:, :,", "folder_node.appendChild(folder_txt_node) file_name = file_name + '.jpg' filename_node = doc.createElement('filename') root_node.appendChild(filename_node)", "axis=0), np.expand_dims(kernel_top, axis=0), np.expand_dims(kernel_bottom, axis=0), ], axis=0) def image_erode(img, kernel_size=5):", "_, channel] = np.shape(volume) imgs = [] names = []", "/ 2 # pre_end = i # for j in", ",否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img, cmap='gray') plt.axis('on') plt.title('image') plt.show() # 将图像画出来,并且画出标记的病灶 def save_image_with_mask(image_arr,", "os.path.join(save_dir, phase_name, dataset_name+'_xml') if not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir = os.path.join(save_dir,", "range(1, slice_num + 1): # z = next_start + j", "np.expand_dims(volume[:, :, i], axis=0) else: next_slice = np.expand_dims(volume[:, :, i+1],", "image = np.array(image) # numpy_clip bottom = -300. 
top =", "0: slice_num = 1 seg = read_nii(seg_path) # print np.shape(volume),", "= 2 res['HEM'] = 3 res['METS'] = 4 return res", "* image + header.RescaleIntercept return image # 读取mhd文件 def read_mhd_image(file_path,", "bottom, top) # to float minval = -350 interv =", "minx = np.min(xs) maxx = np.max(xs) ROI = image_arr_rgb[miny -", "def image_expand(image, size): # def find_significant_layer(mask_image): ''' 找到显著层 :param mask_image:", "= os.path.join(image_dir, sub_name, name) mhd_mask_path = glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' % target_phase))[0]", "[x_size, y_size, z_size] def read_nii(file_path): return nipy.load_image(file_path).get_data() def read_nii_with_header(file_path): img_obj", "512)格式的图像转化为(1, 512, 512)形式的图片 ''' def expand23D(mask_image): shape = list(np.shape(mask_image)) if", "= [] for phase_name in phasenames: mhd_image_path = glob(os.path.join(slice_dir, 'Image_%s*.mhd'", "slice_num + 1): # z = pre_end - j #", "after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]]) for i in range(np.shape(after_zoom)[2]): after_zoom[:,", "convert2depthfirst(image): image = np.array(image) shape = np.shape(image) new_image = np.zeros([shape[2],", "kernel_top[half_size + 1:, :] = 0 kernel_bottom = np.copy(kernel_whole) kernel_bottom[:half_size,", "bndbox_node.appendChild(ymax_node) with open(xml_save_path, 'wb') as f: f.write(doc.toprettyxml(indent='\\t', encoding='utf-8')) line =", ":param seg_path: :return: ''' from skimage.measure import label volume, header", "list(np.shape(image_arr)) image_arr_rgb = np.zeros(shape=[shape[0], shape[1], 3]) image_arr_rgb[:, :, 0] =", "返回病灶类型和ID的字典类型的数据 key是typename value是typeid def return_type_nameid(): res = {} res['CYST'] =", "np.clip(image, bottom, top) # to float minval = -350 interv", "file_name+'.' 
+ suffix_name) if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of", "def dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC', 'ART', 'PV'], target_phase='PV', suffix_name='npy'): target_mask =", "kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.dilate(img, kernel) return", "from glob import glob import scipy import cv2 from xml.dom.minidom", "1:] = 0 kernel_right = np.copy(kernel_whole) kernel_right[:, :half_size] = 0", "读取mhd文件 def read_mhd_image(file_path, rejust=False): header = itk.ReadImage(file_path) image = np.array(itk.GetArrayFromImage(header))", "header['srow_x'][0] y_size = header['srow_y'][1] z_size = header['srow_z'][2] return [x_size, y_size,", "name does not support') assert False max_v = 300. min_v", "axis=0, keepdims=True) imgs.append( np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0), axes=[1, 2, 0]))", "file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) phase_name = ''.join(phasenames) save_path", "== idx) min_xs = np.min(xs) max_xs = np.max(xs) min_ys =", "tumor_weakly_masks = [] liver_masks = [] i = slice_num +", "index, y in enumerate(ys): image_draw.point([xs[index], y], fill=(255, 0, 0)) if", "+ 70 return np.array(image) # 保存mhd文件 def save_mhd_image(image, file_name): header", "np.uint8) def statics_num_slices_lesion(nii_dir): ''' 统计每个case,有多少slice具有病灶 :param nii_dir: :return: ''' mask_nii_paths", "= np.unique(mask) print pixel_value_set for value in list(pixel_value_set): static_res[value] +=", ":, z]) if (i - 1) < 0: pre_slice =", "np.concatenate(np.asarray(mhd_images, np.float), axis=0) mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0])", "def convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0, short_edge=64): ''' 将nii转化成PNG :param volume_path:", "'FNH' res[2] = 'HCC' res[3] = 'HEM' res[4] = 'METS'", "= (image - 
c_minimum) / interv * s_maximum minval =", "= np.clip(image, bottom, top) # to float minval = -350", "res['FNH'] = 1 res['HCC'] = 2 res['HEM'] = 3 res['METS']", "header.RescaleIntercept return image # 读取mhd文件 def read_mhd_image(file_path, rejust=False): header =", "np.shape(seg) [_, _, channel] = np.shape(volume) imgs = [] names", "xmax_node = doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node = doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node)", "xml.dom.minidom import Document typenames = ['CYST', 'FNH', 'HCC', 'HEM', 'METS']", "image_arr[image_arr < -70] = -70 image_arr[image_arr > 180] = 180", "def get_kernel_filters(kernel_size): ''' 返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param kernel_size: :return: [5, kernel_size,", "= slice_num + 1 pos_slice_num = np.sum(np.sum(np.sum(seg == 2, axis=0),", "np.uint8)) image_draw = ImageDraw.Draw(image) [ys, xs] = np.where(mask_image != 0)", "np.uint8) half_size = kernel_size // 2 kernel_left = np.copy(kernel_whole) kernel_left[:,", "+ slice_num / 2 # next_start = i # for", "image = np.array(image) shape = np.shape(image) new_image = np.zeros([shape[2], shape[0],", "= ImageDraw.Draw(image) [ys, xs] = np.where(mask_image != 0) miny =", "difficult_node.appendChild(doc.createTextNode('0')) bndbox_node = doc.createElement('bndbox') object_node.appendChild(bndbox_node) xmin_node = doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node)", "processing(image, size_training): image = np.array(image) # numpy_clip bottom = -300.", "> 180] = 180 image = image + 70 return", "# def find_significant_layer(mask_image): ''' 找到显著层 :param mask_image: [depth, width, height]", "pre_end = i # for j in range(1, slice_num +", "read_mhd_image(mhd_mask_path) min_xs, min_ys, max_xs, max_ys, names, mask = 
extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir,", "os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1] if __name__ == '__main__': # for phasename", "channel - 1 # next_slice.append(volume[:, :, z]) if (i +", "+ str(i)) binary_seg_slice = np.asarray(seg_slice == 2, np.uint8) # print", "in range(shape[0]): new_image[:, :, i] = image[i, :, :] return", "= return_type_idname() return idname_dict[typeid] # 根据病灶类型的name返回id的字符串 def return_typeid_byname(typename): nameid_dict =", "= 1 => biliniear interpolation return after_zoom def MICCAI2018_Iterator(image_dir, execute_func,", "ds.PatientID # 返回病灶类型和ID的字典类型的数据 key是typename value是typeid def return_type_nameid(): res = {}", "[] names = [] masks = [] tumor_weakly_masks = []", "'npy': mhd_images = np.concatenate(np.asarray(mhd_images, np.float), axis=0) mhd_images = np.transpose(np.asarray(mhd_images, np.float32),", ":return: [5, kernel_size, kernel_size] ''' kernel_whole = np.ones([kernel_size, kernel_size], np.uint8)", "mask_nii_paths: mask_img = read_nii(mask_nii_path) has_lesion = np.asarray(np.sum(np.sum(mask_img == 2, axis=0),", "'PV']: # convert_dicomseries2mhd( # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename, # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' +", "0, 0)) if save_path is None: image.show() else: image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path),", "doc.createElement('annotation') doc.appendChild(root_node) folder_name = os.path.basename(save_dir) + '/' + phase_name folder_node", "= doc.createElement('bndbox') object_node.appendChild(bndbox_node) xmin_node = doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node =", "return read_mhd_image(file_path) print('the format of image is not support in", "y], fill=(255, 0, 0)) if save_path is None: 
image.show() else:", "def image_erode(img, kernel_size=5): import cv2 import numpy as np kernel", "0) total_slice_num = np.shape(seg)[-1] print('pos_slice_num is ', pos_slice_num, total_slice_num) neg_rate", "open_operation(slice_image, kernel_size=3): opening = cv2.morphologyEx(slice_image, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))) return", "1 => biliniear interpolation return after_zoom def MICCAI2018_Iterator(image_dir, execute_func, *parameters):", "masks = [] tumor_weakly_masks = [] liver_masks = [] i", "save_dir, phasenames=['NC', 'ART', 'PV'], target_phase='PV', suffix_name='npy'): target_mask = None mhd_images", "filename_node = doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node = doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape =", "import extract_bboxs_mask_from_mask from config import pixel2type, type2pixel for sub_name in", "= 500. s_maximum = 255. image = np.clip(image, c_minimum, c_maximum)", "= image * 2.0 # zoom desired_size = [size_training, size_training]", "i in range(np.shape(after_zoom)[2]): after_zoom[:, :, i] = scipy.ndimage.zoom(image[:, :, i],", "doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node = doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node =", "mhd_image * 255) np.save(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir,", "z >= channel: # z = channel - 1 #", "# 返回病灶类型和ID的字典类型的数据 key是typename value是typeid def return_type_nameid(): res = {} res['CYST']", "180 # min_v = -70 volume[volume > max_v] = max_v", "= { 0: 0, 1: 0, 2: 0, 3: 0,", "+ 1, :] ROI_Image = Image.fromarray(np.asarray(ROI, np.uint8)) for index, y", "get_voxel_size(file_path): load_image_obj = nipy.load_image(file_path) header = load_image_obj.header x_size = header['srow_x'][0]", "2) image = 
np.asarray(image, np.float32) image = image / interv", "return mask_image ''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 ''' def find_centroid3D(image, flag): [x, y,", "= np.max(ys) object_node = doc.createElement('object') root_node.appendChild(object_node) name_node = doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst'))", "res_dict # 读取文件序列 def read_dicom_series(dir_name): reader = itk.ImageSeriesReader() dicom_series =", "import SimpleITK as itk import pydicom import numpy as np", "''' shape = list(np.shape(image)) if len(shape) == 3: return np.squeeze(image)", "= header['srow_z'][2] return [x_size, y_size, z_size] def read_nii(file_path): return nipy.load_image(file_path).get_data()", "将一个矩阵如果可能,压缩到三维的空间 ''' shape = list(np.shape(image)) if len(shape) == 3: return", "== flag) centroid_x = int(np.mean(x)) centroid_y = int(np.mean(y)) centroid_z =", "min_v] = min_v volume -= np.mean(volume) min_v = np.min(volume) max_v", "= np.shape(volume) imgs = [] names = [] masks =", "desired_size = [size_training, size_training] desired_size = np.asarray(desired_size, dtype=np.int) zooms =", "= {} res[0] = 'CYST' res[1] = 'FNH' res[2] =", "biliniear interpolation return after_zoom def preprocessing_agumentation(image, size_training): image = np.array(image)", "将DICOM序列转化成MHD文件 def convert_dicomseries2mhd(dicom_series_dir, save_path): data = read_dicom_series(dicom_series_dir) save_mhd_image(data, save_path) #", "= np.ones([kernel_size, kernel_size], np.uint8) half_size = kernel_size // 2 kernel_left", "list(pixel_value_set): static_res[value] += np.sum(mask == value) print(static_res) def convertCase2PNGs(volume_path, seg_path,", "z = channel - 1 # next_slice.append(volume[:, :, z]) if", "= (mhd_image - min_v) / interv file_name = os.path.basename(slice_dir) dataset_name", "-70 image_arr[image_arr > 180] = 180 image_arr = image_arr +", "np.copy(kernel_whole) kernel_bottom[:half_size, :] = 0 return np.concatenate([ np.expand_dims(kernel_whole, axis=0), 
np.expand_dims(kernel_left,", "= ''.join(phasenames) save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.' + suffix_name)", "-1: return phasename # 读取DICOM文件中包含的病例ID信息 def read_patientId(dicom_file_path): ds = pydicom.read_file(dicom_file_path)", "image /= (interv / 2) image = np.asarray(image, np.float32) image", "%d\\n' % ('Cyst', min_y, min_x, max_y, max_x) print(line) lines =", "''' xs, ys = np.where(mask_image == 1) xs_min = np.min(xs)", "if neg_rate > 1.0: neg_rate = 1.0 for i in", "= kernel_size // 2 kernel_left = np.copy(kernel_whole) kernel_left[:, half_size +", "f.write(doc.toprettyxml(indent='\\t', encoding='utf-8')) line = '%s %d %d %d %d\\n' %", "np.array(image) # numpy_clip c_minimum = -300. c_maximum = 500. s_maximum", "after_zeros = convert2depthfirst(zeros) # print np.shape(after_zeros) # test_convert2depthfirst() ''' 将[d,", "liver_masks = [] i = slice_num + 1 pos_slice_num =", "= np.ones((kernel_size, kernel_size), np.uint8) erosion = cv2.erode(img, kernel, iterations=1) return", "= np.asarray(volume, np.float32) max_v = 250. min_v = -200. 
#", "disk, dilation import nipy import os from glob import glob", "= np.where(image == flag) centroid_x = int(np.mean(x)) centroid_y = int(np.mean(y))", "order = 1 => biliniear interpolation return after_zoom def preprocessing_agumentation(image,", "mask_image = read_mhd_image(mhd_mask_path) min_xs, min_ys, max_xs, max_ys, names, mask =", "header = pydicom.read_file(file_name) image = header.pixel_array image = header.RescaleSlope *", "save_path): image = Image.fromarray(np.asarray(image_arr, np.uint8)) image.save(save_path) def show_image(image): img =", "ndimage.binary_fill_holes(image).astype(np.uint8) return image def close_operation(binary_image, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size,", "= np.zeros([shape[2], shape[0], shape[1]]) for i in range(shape[2]): new_image[i, :,", "path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name) # image = read_nil(path) # print(np.shape(image))", "zeros = np.zeros([100, 100, 30]) # after_zeros = convert2depthfirst(zeros) #", "mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) cv2.imwrite(save_path, mhd_image * 255)", "= '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' # phase_name = 'NC' # MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir,", "min_v volume = (volume - min_v) / interv z_axis_case =", "top) # to float minval = -350 interv = 500", "maxx = np.max(xs) ROI = image_arr_rgb[miny - 1:maxy + 1,", "= pre_end - j # if z < 0: #", "mask_image: [depth, width, height] :return: idx ''' sum_res = np.sum(np.sum(mask_image,", ":, 2] = image_arr image = Image.fromarray(np.asarray(image_arr_rgb, np.uint8)) image_draw =", "2.0 # zoom desired_size = [size_training, size_training] desired_size = np.asarray(desired_size,", "labeled_mask = label(binary_seg_slice) weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8) for idx in", "], axis=0) def image_erode(img, kernel_size=5): 
import cv2 import numpy as", "return image def extract_ROI(image, mask_image): ''' 提取一幅图像中的ROI ''' xs, ys", "== 1) min_x = np.min(xs) min_y = np.min(ys) max_x =", "range(channel): seg_slice = seg[:, :, i] mid_slice = np.expand_dims(volume[:, :,", "image = (image - c_minimum) / interv * s_maximum minval", "- 1:maxx + 1, :] ROI_Image = Image.fromarray(np.asarray(ROI, np.uint8)) for", "phase_name = ''.join(phasenames) save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.' +", "np.shape(volume) imgs = [] names = [] masks = []", "print(np.mean(mhd_image, dtype=np.float32)) mhd_image -= np.mean(mhd_image) min_v = np.min(mhd_image) max_v =", "1, :] ROI_Image = Image.fromarray(np.asarray(ROI, np.uint8)) for index, y in", "= cv2.morphologyEx(slice_image, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))) return opening def get_kernel_filters(kernel_size):", "# numpy_clip bottom = -300. top = 500. image =", "return None def processing(image, size_training): image = np.array(image) # numpy_clip", "gc from skimage.morphology import disk, dilation import nipy import os", "/ z_axis_case) if slice_num == 0: slice_num = 1 seg", "def save_image(image_arr, save_path): image = Image.fromarray(np.asarray(image_arr, np.uint8)) image.save(save_path) def show_image(image):", "''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 ''' def find_centroid3D(image, flag): [x, y, z] =", "= np.zeros_like(binary_seg_slice, np.uint8) for idx in range(1, np.max(labeled_mask) + 1):", "next_slice = np.expand_dims(volume[:, :, i], axis=0) else: next_slice = np.expand_dims(volume[:,", "-70 image[image > 180] = 180 image = image +", "int(np.mean(z)) return centroid_x, centroid_y, centroid_z ''' 将[w, h, d]reshape为[d, w,", "conver2JPG multi phase # image_dir = '/home/give/Documents/dataset/LiverLesionDetection_Splited/0' # static_pixel_num(image_dir, 'PV')", "= glob(os.path.join(slice_dir, phase_name + '_Mask*.mhd'))[0] mhd_image = read_mhd_image(mhd_image_path) 
mask_image =", "- 1) < 0: pre_slice = np.expand_dims(volume[:, :, i], axis=0)", "in phasenames: if file_name.find(phasename) != -1: return phasename # 读取DICOM文件中包含的病例ID信息", "mid_slice = np.expand_dims(volume[:, :, i], axis=0) pre_slice = [] #", "np.float32) max_v = 250. min_v = -200. # max_v =", "mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image = mhd_images", "read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_images.append(mhd_image) mask_image", "interv = maxval - minval # print('static scaler 0', interv)", "= list(np.shape(mhd_image)) size_node = doc.createElement('size') root_node.appendChild(size_node) width_node = doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0])))", "bndbox_node.appendChild(xmin_node) ymin_node = doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node = doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y)))", "from skimage.morphology import disk, dilation import nipy import os from", "img_arr, res_dict # 读取文件序列 def read_dicom_series(dir_name): reader = itk.ImageSeriesReader() dicom_series", "PIL import Image, ImageDraw import gc from skimage.morphology import disk,", "np.expand_dims(mhd_image, axis=2) mhd_image = np.concatenate([mhd_image, mhd_image, mhd_image], axis=2) mask_image =", "= np.max(mhd_image) interv = max_v - min_v mhd_image = (mhd_image", "= '%s %d %d %d %d\\n' % ('Cyst', min_y, min_x,", "这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img, cmap='gray') plt.axis('on') plt.title('image') plt.show() # 将图像画出来,并且画出标记的病灶", "resize_image(image, size): image = Image.fromarray(np.asarray(image, np.uint8)) return image.resize((size, size)) #", "= 255. 
image = np.clip(image, c_minimum, c_maximum) interv = float(c_maximum", "image_arr_rgb = np.zeros(shape=[shape[0], shape[1], 3]) image_arr_rgb[:, :, 0] = image_arr", "[ys, xs] = np.where(mask_image != 0) miny = np.min(ys) maxy", "1) print(xs, ys) min_x = np.min(xs) min_y = np.min(ys) max_x", "return_typename_byid(typeid): idname_dict = return_type_idname() return idname_dict[typeid] # 根据病灶类型的name返回id的字符串 def return_typeid_byname(typename):", "= 0 res['FNH'] = 1 res['HCC'] = 2 res['HEM'] =", "= glob(os.path.join(slice_dir, 'Mask_%s*.mhd' % phase_name))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image =", "names = os.listdir(os.path.join(image_dir, sub_name)) for name in names: cur_slice_dir =", "kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.erode(img, kernel)", "# '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename, # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename + '.mhd'", "phase_name, dataset_name, file_name+'.' 
+ suffix_name) if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the", "range(1, np.max(labeled_mask) + 1): xs, ys = np.where(labeled_mask == idx)", "146, 475, 308} static_res = { 0: 0, 1: 0,", "mhd_images = np.concatenate(np.asarray(mhd_images, np.float), axis=0) mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1,", "read_nil(file_path) if file_path.endswith('.mhd'): return read_mhd_image(file_path) print('the format of image is", "None: image.show() else: image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0] + '_ROI.jpg')) del image,", "names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') # for name in names: # path", "maxval - minval # print('static scaler 0', interv) # scale", "np.sum(has_lesion) print os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1] if __name__ == '__main__': #", "np.shape(image) new_image = np.zeros([shape[2], shape[0], shape[1]]) for i in range(shape[2]):", "正样本是负样本的 if neg_rate > 1.0: neg_rate = 1.0 for i", "np.max(xs) max_y = np.max(ys) object_node = doc.createElement('object') root_node.appendChild(object_node) name_node =", "{} res[0] = 'CYST' res[1] = 'FNH' res[2] = 'HCC'", "def image_expand(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image =", "= 0 kernel_right = np.copy(kernel_whole) kernel_right[:, :half_size] = 0 kernel_top", "将nii转化成PNG :param volume_path: nii的路径 :param seg_path: :return: ''' from skimage.measure", "is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) cv2.imwrite(save_path, mhd_image * 255) xml_save_dir", "0: pre_slice = np.expand_dims(volume[:, :, i], axis=0) else: pre_slice =", "glob import glob import scipy import cv2 from xml.dom.minidom import", "/ 2 # next_start = i # for j in", "返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 ''' def 
find_centroid3D(image, flag): [x, y, z] = np.where(image", "scipy.ndimage.zoom(image[:, :, i], zooms, order=1) # order = 1 =>", "mid_slice, next_slice], axis=0), axes=[1, 2, 0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' +", "return res # 返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename def return_type_idname(): res =", "return np.squeeze(image) return image def extract_ROI(image, mask_image): ''' 提取一幅图像中的ROI '''", "= max_v volume[volume < min_v] = min_v volume -= np.mean(volume)", "return np.asarray(imgs, np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8), np.asarray( tumor_weakly_masks, np.uint8)", "import scipy import cv2 from xml.dom.minidom import Document typenames =", "zooms, order=1) # order = 1 => biliniear interpolation return", "ymin_node = doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node = doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node)", "disk(r)) ''' 将形式如(512, 512)格式的图像转化为(1, 512, 512)形式的图片 ''' def expand23D(mask_image): shape", "execute_func: :return: ''' for sub_name in ['train', 'val', 'test']: names", "= np.zeros([100, 100, 30]) # after_zeros = convert2depthfirst(zeros) # print", "weakly_label_mask[min_xs: max_xs, min_ys: max_ys] = 1 liver_masks.append(np.asarray(seg_slice == 1, np.uint8))", "convert2depthlastest(image): image = np.array(image) shape = np.shape(image) new_image = np.zeros([shape[1],", "liver_masks.append(np.asarray(seg_slice == 1, np.uint8)) tumor_weakly_masks.append(weakly_label_mask) # i += 1 return", "将[d, w, h]reshape为[w, h, d] ''' def convert2depthlastest(image): image =", "next_slice = np.expand_dims(volume[:, :, i+1], axis=0) # pre_slice = np.mean(pre_slice,", "= image_arr + 70 shape = list(np.shape(image_arr)) image_arr_rgb = np.zeros(shape=[shape[0],", "%d %d\\n' % ('Cyst', min_y, min_x, max_y, max_x) print(line) lines", 
"dataset_name, file_name+'.jpg') if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image", "phase_name))[0] mhd_mask_path = glob(os.path.join(slice_dir, 'Mask_%s*.mhd' % phase_name))[0] mhd_image = read_mhd_image(mhd_image_path)", "glob(os.path.join(slice_dir, phase_name + '_Mask*.mhd'))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path)", "mask_image ''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 ''' def find_centroid3D(image, flag): [x, y, z]", "res[0] = 'CYST' res[1] = 'FNH' res[2] = 'HCC' res[3]", "np.where(mask_image == 1) min_x = np.min(xs) min_y = np.min(ys) max_x", "i] = image[i, :, :] return new_image def read_image_file(file_path): if", "image /= (interv / 2) # zoom desired_size = [size_training,", "= int(np.mean(z)) return centroid_x, centroid_y, centroid_z ''' 将[w, h, d]reshape为[d,", "# next_start = i # for j in range(1, slice_num", "print('after expand23D', np.shape(mask_image)) return mask_image ''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 ''' def find_centroid3D(image,", "= os.path.join(image_dir, sub_name, name) execute_func(cur_slice_dir, *parameters) def dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'):", ":, i], zooms, order=1) # order = 1 => biliniear", "os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.jpg')", "''' kernel_whole = np.ones([kernel_size, kernel_size], np.uint8) half_size = kernel_size //", "ROI_Image gc.collect() def compress22dim(image): ''' 将一个矩阵如果可能,压缩到三维的空间 ''' shape = list(np.shape(image))", "plt.show() # 将图像画出来,并且画出标记的病灶 def save_image_with_mask(image_arr, mask_image, save_path): image_arr[image_arr < -70]", "xs, ys = np.where(mask_image == 1) min_x = np.min(xs) min_y", "* s_maximum minval = 0. maxval = 255. 
image -=", "mask_nii_path in mask_nii_paths: mask_img = read_nii(mask_nii_path) has_lesion = np.asarray(np.sum(np.sum(mask_img ==", "from xml.dom.minidom import Document typenames = ['CYST', 'FNH', 'HCC', 'HEM',", "np.max(mhd_image)) #cv2.imwrite(save_path, mhd_image * 255) np.save(save_path, mhd_image * 255) xml_save_dir", "[] masks = [] tumor_weakly_masks = [] liver_masks = []", "min_v = -70 volume[volume > max_v] = max_v volume[volume <", "= np.expand_dims(volume[:, :, i], axis=0) pre_slice = [] # pre_end", "1392043, 2: 209128, 3: 1486676, 4: 458278, 5: 705482} #", "-*- coding=utf-8 -*- import SimpleITK as itk import pydicom import", "def convert2depthlastest(image): image = np.array(image) shape = np.shape(image) new_image =", "static_res[value] += np.sum(mask == value) print(static_res) def convertCase2PNGs(volume_path, seg_path, save_dir=None,", "res[1] = 'FNH' res[2] = 'HCC' res[3] = 'HEM' res[4]", "= np.where(mask_image == 1) min_x = np.min(xs) min_y = np.min(ys)", "os.makedirs(xml_save_dir) evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt') if not os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir)", "# 将一个矩阵保存为图片 def save_image(image_arr, save_path): image = Image.fromarray(np.asarray(image_arr, np.uint8)) image.save(save_path)", "read_image_file(file_path): if file_path.endswith('.nii'): return read_nil(file_path) if file_path.endswith('.mhd'): return read_mhd_image(file_path) print('the", "kernel, iterations=1) return erosion def image_expand(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,", "返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param kernel_size: :return: [5, kernel_size, kernel_size] ''' kernel_whole", "new_image # def test_convert2depthfirst(): # zeros = np.zeros([100, 100, 30])", "os.listdir(os.path.join(image_dir, sub_name)) for name in names: cur_slice_dir = os.path.join(image_dir, sub_name,", "np.shape(volume), np.shape(seg) [_, _, channel] = np.shape(volume) 
imgs = []", "0 xs, ys = np.where(mask_image == 1) print(xs, ys) min_x", "of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) #cv2.imwrite(save_path, mhd_image *", "if (i + 1) >= channel: next_slice = np.expand_dims(volume[:, :,", "- min_v volume = (volume - min_v) / interv z_axis_case", "= np.max(volume) interv = max_v - min_v volume = (volume", "np.min(xs) xs_max = np.max(xs) ys_min = np.min(ys) ys_max = np.max(ys)", "# 读取文件序列 def read_dicom_series(dir_name): reader = itk.ImageSeriesReader() dicom_series = reader.GetGDCMSeriesFileNames(dir_name)", "image_erode(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.erode(img,", "axis=0) def image_erode(img, kernel_size=5): import cv2 import numpy as np", ":, :] return new_image def read_image_file(file_path): if file_path.endswith('.nii'): return read_nil(file_path)", "down to 0 - 2 # image /= (interv /", "cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))) return opening def get_kernel_filters(kernel_size): ''' 返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作)", "# max_v = 180 # min_v = -70 volume[volume >", "i + slice_num / 2 # next_start = i #", "xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml') if not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir", "mhd_image = np.expand_dims(mhd_image, axis=2) mhd_image = np.concatenate([mhd_image, mhd_image, mhd_image], axis=2)", "= convert2depthfirst(zeros) # print np.shape(after_zeros) # test_convert2depthfirst() ''' 将[d, w,", "seg_slice = seg[:, :, i] mid_slice = np.expand_dims(volume[:, :, i],", "'segmentation-*.nii')) for mask_nii_path in mask_nii_paths: mask_img = read_nii(mask_nii_path) has_lesion =", "nipy.load_image(file_path).get_data() def read_nii_with_header(file_path): img_obj = nipy.load_image(file_path) header_obj = img_obj.header res_dict", "np.mean(volume) min_v = np.min(volume) max_v = 
np.max(volume) interv = max_v", "of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) cv2.imwrite(save_path, mhd_image *", "max_ys, names, mask = extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir, 'tumor_types')) for key in", "= pydicom.read_file(dicom_file_path) return ds.PatientID # 返回病灶类型和ID的字典类型的数据 key是typename value是typeid def return_type_nameid():", "np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image = mhd_images else: print('the", "xs] = np.where(mask_image != 0) miny = np.min(ys) maxy =", "# to float minval = -350 interv = 500 -", "shape = list(np.shape(image_arr)) image_arr_rgb = np.zeros(shape=[shape[0], shape[1], 3]) image_arr_rgb[:, :,", "image def extract_ROI(image, mask_image): ''' 提取一幅图像中的ROI ''' xs, ys =", "doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node = doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with open(xml_save_path,", "= doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node = doc.createElement('truncated') object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node", "max_xs, max_ys, names, mask = extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir, 'tumor_types')) for key", "mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_images.append(mhd_image) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) if", "% ('Cyst', min_y, min_x, max_y, max_x) print(line) lines = []", "xs_min = np.min(xs) xs_max = np.max(xs) ys_min = np.min(ys) ys_max", "= -300. c_maximum = 500. s_maximum = 255. 
image =", "= 1.0 for i in range(channel): seg_slice = seg[:, :,", "f.writelines(lines) f.close() def dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC', 'ART', 'PV'], target_phase='PV', suffix_name='npy'):", "skimage.morphology import disk, dilation import nipy import os from glob", "kernel_size), np.uint8) erosion = cv2.erode(img, kernel, iterations=1) return erosion def", "np.shape(mask_image)) return mask_image ''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 ''' def find_centroid3D(image, flag): [x,", "ys_min: ys_max + 1] def resize_image(image, size): image = Image.fromarray(np.asarray(image,", "+ 1, ys_min: ys_max + 1] def resize_image(image, size): image", "phasenames: if file_name.find(phasename) != -1: return phasename # 读取DICOM文件中包含的病例ID信息 def", "minval = 0. maxval = 255. image -= minval interv", "np.float32), axes=[1, 2, 0]) mhd_image = mhd_images elif suffix_name ==", "convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0, short_edge=64): ''' 将nii转化成PNG :param volume_path: nii的路径", "# 根据文件名返回期项名 def return_phasename(file_name): phasenames = ['NC', 'ART', 'PV'] for", "np.asarray(np.squeeze(mhd_image), np.float32) mhd_images.append(mhd_image) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) if phase_name ==", "'PV'], target_phase='PV', suffix_name='npy'): target_mask = None mhd_images = [] for", "= read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_images.append(mhd_image) mask_image = np.asarray(np.squeeze(mask_image),", "+ '/' + phase_name folder_node = doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node =", ":, :] = image[:, :, i] return new_image # def", "read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_image =", "filename_txt_node = doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape = list(np.shape(mhd_image)) size_node = doc.createElement('size')", 
"d]reshape为[d, w, h] ''' def convert2depthfirst(image): image = np.array(image) shape", "kernel_size)) image = cv2.dilate(img, kernel) return image def image_erode(img, kernel_size=5):", "+ phasename, # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename + '.mhd' # )", "= -70 image_arr[image_arr > 180] = 180 image_arr = image_arr", "3: mask_image = mask_image[1, :, :] print('the mask image shape", "in range(np.shape(after_zoom)[2]): after_zoom[:, :, i] = scipy.ndimage.zoom(image[:, :, i], zooms,", "range(np.shape(after_zoom)[2]): after_zoom[:, :, i] = scipy.ndimage.zoom(image[:, :, i], zooms, order=1)", ":return: idx ''' sum_res = np.sum(np.sum(mask_image, axis=1), axis=1) return np.argmax(sum_res)", "/ interv * s_maximum minval = 0. maxval = 255.", "np.uint8), np.asarray( tumor_weakly_masks, np.uint8) def statics_num_slices_lesion(nii_dir): ''' 统计每个case,有多少slice具有病灶 :param nii_dir:", "read_mhd_image(file_path) print('the format of image is not support in this", "1) min_x = np.min(xs) min_y = np.min(ys) max_x = np.max(xs)", "np.asarray(volume, np.float32) max_v = 250. min_v = -200. 
# max_v", "'.mhd' # ) # names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') # for name", "else: pre_slice = np.expand_dims(volume[:, :, i-1], axis=0) next_slice = []", "os.path.join(image_dir, sub_name, name) execute_func(cur_slice_dir, *parameters) def dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'): mhd_image_path", "import glob import scipy import cv2 from xml.dom.minidom import Document", "return erosion def image_expand(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))", "np.uint8)) return image.resize((size, size)) # def image_expand(mask_image, r): # return", "max_v mhd_image[mhd_image < min_v] = min_v print(np.mean(mhd_image, dtype=np.float32)) mhd_image -=", "ROI_Image = Image.fromarray(np.asarray(ROI, np.uint8)) for index, y in enumerate(ys): image_draw.point([xs[index],", "image_array # 将DICOM序列转化成MHD文件 def convert_dicomseries2mhd(dicom_series_dir, save_path): data = read_dicom_series(dicom_series_dir) save_mhd_image(data,", "size): # def find_significant_layer(mask_image): ''' 找到显著层 :param mask_image: [depth, width,", "= read_mhd_image(mhd_mask_path) min_xs, min_ys, max_xs, max_ys, names, mask = extract_bboxs_mask_from_mask(mask_image,", "file_name.find(phasename) != -1: return phasename # 读取DICOM文件中包含的病例ID信息 def read_patientId(dicom_file_path): ds", "dilation import nipy import os from glob import glob import", "np.asarray(image, np.float32) image = image / interv image = image", "cv2.erode(img, kernel) return image # 图像膨胀 # def image_expand(image, size):", "if rejust: image[image < -70] = -70 image[image > 180]", "read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_image = np.expand_dims(mhd_image, axis=2) mhd_image", "axis=0) != 0) total_slice_num = np.shape(seg)[-1] print('pos_slice_num is ', pos_slice_num,", "len(mask_image_shape) == 3: mask_image = mask_image[1, :, :] print('the mask", "i = 
slice_num + 1 pos_slice_num = np.sum(np.sum(np.sum(seg == 2,", "max_v = np.max(mhd_image) interv = max_v - min_v mhd_image =", "mask_image = target_mask mask_image_shape = list(np.shape(mask_image)) if len(mask_image_shape) == 3:", "type2pixel[pixel2type[key]][0] pixel_value_set = np.unique(mask) print pixel_value_set for value in list(pixel_value_set):", "image = np.clip(image, bottom, top) # to float minval =", "phase_name, dataset_name, file_name+'.jpg') if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of", "= np.min(volume) max_v = np.max(volume) interv = max_v - min_v", "= i + slice_num / 2 # next_start = i", "np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) #cv2.imwrite(save_path, mhd_image * 255) np.save(save_path, mhd_image *", "def return_typename_byid(typeid): idname_dict = return_type_idname() return idname_dict[typeid] # 根据病灶类型的name返回id的字符串 def", "= glob(os.path.join(nii_dir, 'segmentation-*.nii')) for mask_nii_path in mask_nii_paths: mask_img = read_nii(mask_nii_path)", "# zeros = np.zeros([100, 100, 30]) # after_zeros = convert2depthfirst(zeros)", "= np.min(ys) max_x = np.max(xs) max_y = np.max(ys) object_node =", "lines.append(line) with open(gt_save_path, 'w') as f: f.writelines(lines) f.close() def dicom2jpg_multiphase(slice_dir,", "None mhd_images = [] for phase_name in phasenames: mhd_image_path =", "next_start + j # if z >= channel: # z", "os.path.join(save_dir, phase_name, dataset_name, file_name+'.' 
+ suffix_name) if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path))", "读取文件序列 def read_dicom_series(dir_name): reader = itk.ImageSeriesReader() dicom_series = reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series)", "2, np.uint8) # print np.max(binary_seg_slice) masks.append(binary_seg_slice) labeled_mask = label(binary_seg_slice) weakly_label_mask", "np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) cv2.imwrite(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir,", "np.max(xs) min_ys = np.min(ys) max_ys = np.max(ys) weakly_label_mask[min_xs: max_xs, min_ys:", "-= np.mean(volume) min_v = np.min(volume) max_v = np.max(volume) interv =", "# 根据病灶类型的name返回id的字符串 def return_typeid_byname(typename): nameid_dict = return_type_nameid() return nameid_dict[typename] #", "read_dicom_file(file_name): header = pydicom.read_file(file_name) image = header.pixel_array image = header.RescaleSlope", "image.show() from scipy import ndimage image = ndimage.binary_fill_holes(image).astype(np.uint8) return image", "dataset_name = os.path.basename(os.path.dirname(slice_dir)) phase_name = ''.join(phasenames) save_path = os.path.join(save_dir, phase_name,", "= 250. min_v = -200. # max_v = 180 #", "# '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename + '.mhd' # ) # names", "image = image / interv image = image * 2.0", "= 'HCC' res[3] = 'HEM' res[4] = 'METS' return res", "= list(np.shape(image_arr)) image_arr_rgb = np.zeros(shape=[shape[0], shape[1], 3]) image_arr_rgb[:, :, 0]", "= np.asarray(np.squeeze(mask_image), np.uint8) max_v = 300. min_v = -350. 
mhd_image[mhd_image", "interv file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) phase_name = ''.join(phasenames)", "!= 0) total_slice_num = np.shape(seg)[-1] print('pos_slice_num is ', pos_slice_num, total_slice_num)", "= -70 image[image > 180] = 180 image = image", "max_ys = np.max(ys) weakly_label_mask[min_xs: max_xs, min_ys: max_ys] = 1 liver_masks.append(np.asarray(seg_slice", "in ['train', 'val', 'test']: names = os.listdir(os.path.join(image_dir, sub_name)) for name", "os.path.basename(save_dir) + '/' + phase_name folder_node = doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node", "xs, ys = np.where(mask_image == 1) print(xs, ys) min_x =", "slice_num + 1): # z = next_start + j #", "= np.where(mask_image != 0) miny = np.min(ys) maxy = np.max(ys)", "interpolation return after_zoom def MICCAI2018_Iterator(image_dir, execute_func, *parameters): ''' 遍历MICCAI2018文件夹的框架 :param", "= np.min(xs) maxx = np.max(xs) ROI = image_arr_rgb[miny - 1:maxy", "np.where(image == flag) centroid_x = int(np.mean(x)) centroid_y = int(np.mean(y)) centroid_z", "print(np.shape(mhd_images)) mask_image = target_mask mask_image_shape = list(np.shape(mask_image)) if len(mask_image_shape) ==", "''' def convert2depthfirst(image): image = np.array(image) shape = np.shape(image) new_image", "doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node = doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape = list(np.shape(mhd_image)) size_node", "axis=1), axis=1) return np.argmax(sum_res) # 将一个矩阵保存为图片 def save_image(image_arr, save_path): image", "-= minval interv = maxval - minval # print('static scaler", "save_mhd_image(data, save_path) # 读取单个DICOM文件 def read_dicom_file(file_name): header = pydicom.read_file(file_name) image", "z_size = header['srow_z'][2] return [x_size, y_size, z_size] def read_nii(file_path): return", "s_maximum minval = 0. maxval = 255. 
image -= minval", "print(line) lines = [] lines.append(line) with open(gt_save_path, 'w') as f:", "glob(os.path.join(slice_dir, 'Mask_%s*.mhd' % phase_name))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path)", "interv * s_maximum minval = 0. maxval = 255. image", "return image # 图像膨胀 # def image_expand(image, size): # def", "itk.ImageSeriesReader() dicom_series = reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series) images = reader.Execute() image_array =", "new_image[i, :, :] = image[:, :, i] return new_image #", "dtype=np.float32)) mhd_image -= np.mean(mhd_image) min_v = np.min(mhd_image) max_v = np.max(mhd_image)", "# next_slice.append(volume[:, :, z]) if (i + 1) >= channel:", "centroid_z ''' 将[w, h, d]reshape为[d, w, h] ''' def convert2depthfirst(image):", "= np.max(xs) ys_min = np.min(ys) ys_max = np.max(ys) return image[xs_min:", "res['METS'] = 4 return res # 返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename def", "flag) centroid_x = int(np.mean(x)) centroid_y = int(np.mean(y)) centroid_z = int(np.mean(z))", "pos_slice_num, total_slice_num) neg_rate = (3.0 * pos_slice_num) / total_slice_num #", "volume, header = read_nii_with_header(volume_path) # volume = np.transpose(volume, [1, 0,", "centroid_y = int(np.mean(y)) centroid_z = int(np.mean(z)) return centroid_x, centroid_y, centroid_z", "after_zoom def preprocessing_agumentation(image, size_training): image = np.array(image) # numpy_clip c_minimum", "np.float32) mhd_images.append(mhd_image) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) if phase_name == target_phase:", "read_nil(path) # print(np.shape(image)) # conver2JPG single phase # image_dir =", "save_image(image_arr, save_path): image = Image.fromarray(np.asarray(image_arr, np.uint8)) image.save(save_path) def show_image(image): img", "1) xs_min = np.min(xs) xs_max = np.max(xs) ys_min = np.min(ys)", "= list(np.shape(mask_image)) if len(mask_image_shape) == 3: mask_image = mask_image[1, 
:,", "type2pixel for sub_name in ['train', 'val', 'test']: names = os.listdir(os.path.join(image_dir,", "512)形式的图片 ''' def expand23D(mask_image): shape = list(np.shape(mask_image)) if len(shape) ==", "= os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) save_path = os.path.join(save_dir, phase_name, dataset_name,", "== 2: mask_image = np.expand_dims(mask_image, axis=0) print('after expand23D', np.shape(mask_image)) return", "read_nii(file_path): return nipy.load_image(file_path).get_data() def read_nii_with_header(file_path): img_obj = nipy.load_image(file_path) header_obj =", "import ndimage image = ndimage.binary_fill_holes(image).astype(np.uint8) return image def close_operation(binary_image, kernel_size=5):", "+ phasename + '.mhd' # ) # names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2')", "image = Image.fromarray(np.asarray(image, np.uint8)) return image.resize((size, size)) # def image_expand(mask_image,", "+ '.txt') # for evulate doc = Document() root_node =", "-*- import SimpleITK as itk import pydicom import numpy as", "sub_name in ['train', 'val', 'test']: names = os.listdir(os.path.join(image_dir, sub_name)) for", "2 # image /= (interv / 2) image = np.asarray(image,", "show_image(image): img = np.asarray(image, np.uint8) import matplotlib.pyplot as plt plt.figure(\"Image\")", "def dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'): mhd_image_path = glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0] mhd_mask_path =", "= np.min(xs) max_xs = np.max(xs) min_ys = np.min(ys) max_ys =", "expand23D(mask_image): shape = list(np.shape(mask_image)) if len(shape) == 2: mask_image =", "support') assert False max_v = 300. min_v = -350. 
mhd_image[mhd_image", ":, i], axis=0) else: pre_slice = np.expand_dims(volume[:, :, i-1], axis=0)", ":, i+1], axis=0) # pre_slice = np.mean(pre_slice, axis=0, keepdims=True) #", "image[i, :, :] return new_image def read_image_file(file_path): if file_path.endswith('.nii'): return", "mhd_image = (mhd_image - min_v) / interv file_name = os.path.basename(slice_dir)", "(mhd_image - min_v) / interv file_name = os.path.basename(slice_dir) dataset_name =", "does not support') assert False max_v = 300. min_v =", "from convert2jpg import extract_bboxs_mask_from_mask from config import pixel2type, type2pixel for", "1 seg = read_nii(seg_path) # print np.shape(volume), np.shape(seg) [_, _,", "# 读取DICOM文件中包含的病例ID信息 def read_patientId(dicom_file_path): ds = pydicom.read_file(dicom_file_path) return ds.PatientID #", "in range(channel): seg_slice = seg[:, :, i] mid_slice = np.expand_dims(volume[:,", "= [] lines.append(line) with open(gt_save_path, 'w') as f: f.writelines(lines) f.close()", "file_path.endswith('.mhd'): return read_mhd_image(file_path) print('the format of image is not support", "names: cur_slice_dir = os.path.join(image_dir, sub_name, name) execute_func(cur_slice_dir, *parameters) def dicom2jpg_singlephase(slice_dir,", "# def image_expand(image, size): # def find_significant_layer(mask_image): ''' 找到显著层 :param", "将形式如(512, 512)格式的图像转化为(1, 512, 512)形式的图片 ''' def expand23D(mask_image): shape = list(np.shape(mask_image))", "= 1 res['HCC'] = 2 res['HEM'] = 3 res['METS'] =", "find_centroid3D(image, flag): [x, y, z] = np.where(image == flag) centroid_x", "axis=0) else: next_slice = np.expand_dims(volume[:, :, i+1], axis=0) # pre_slice", "- c_minimum) image = (image - c_minimum) / interv *", "= doc.createElement('size') root_node.appendChild(size_node) width_node = doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node = doc.createElement('height')", "return phasename # 读取DICOM文件中包含的病例ID信息 def read_patientId(dicom_file_path): ds = 
pydicom.read_file(dicom_file_path) return", "np.copy(kernel_whole) kernel_right[:, :half_size] = 0 kernel_top = np.copy(kernel_whole) kernel_top[half_size +", "= np.zeros([size_training, size_training, np.shape(image)[2]]) for i in range(np.shape(after_zoom)[2]): after_zoom[:, :,", "-300. c_maximum = 500. s_maximum = 255. image = np.clip(image,", "np.min(volume) max_v = np.max(volume) interv = max_v - min_v volume", "mhd_image = mhd_images else: print('the suffix name does not support')", "y in enumerate(ys): image_draw.point([xs[index], y], fill=(255, 0, 0)) if save_path", "== 3: return np.squeeze(image) return image def extract_ROI(image, mask_image): '''", "pre_end - j # if z < 0: # z", "+ suffix_name) if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image", "centroid_y, centroid_z ''' 将[w, h, d]reshape为[d, w, h] ''' def", "num_lesion_slices = np.sum(has_lesion) print os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1] if __name__ ==", "3: return np.squeeze(image) return image def extract_ROI(image, mask_image): ''' 提取一幅图像中的ROI", "preprocessing_agumentation(image, size_training): image = np.array(image) # numpy_clip c_minimum = -300.", "# {0: 217784361, 1: 1392043, 2: 209128, 3: 1486676, 4:", "phase_name='PV'): mhd_image_path = glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0] mhd_mask_path = glob(os.path.join(slice_dir, phase_name +", "half_size + 1:] = 0 kernel_right = np.copy(kernel_whole) kernel_right[:, :half_size]", "1): # z = pre_end - j # if z", "np.mean(mhd_image) min_v = np.min(mhd_image) max_v = np.max(mhd_image) interv = max_v", "return image.resize((size, size)) # def image_expand(mask_image, r): # return dilation(mask_image,", "kernel_size=3): opening = cv2.morphologyEx(slice_image, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))) return opening", "def read_mhd_image(file_path, rejust=False): header = 
itk.ReadImage(file_path) image = np.array(itk.GetArrayFromImage(header)) if", "nameid_dict = return_type_nameid() return nameid_dict[typename] # 填充图像 def fill_region(image): #", "= {} res['CYST'] = 0 res['FNH'] = 1 res['HCC'] =", "len(shape) == 3: return np.squeeze(image) return image def extract_ROI(image, mask_image):", "glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' % target_phase))[0] mask_image = read_mhd_image(mhd_mask_path) min_xs, min_ys, max_xs,", "volume = np.asarray(volume, np.float32) max_v = 250. min_v = -200.", "max_v = np.max(volume) interv = max_v - min_v volume =", "z] = np.where(image == flag) centroid_x = int(np.mean(x)) centroid_y =", "['train', 'val', 'test']: names = os.listdir(os.path.join(image_dir, sub_name)) for name in", "= read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_images.append(mhd_image)", "if z >= channel: # z = channel - 1", "(kernel_size, kernel_size)) image = cv2.dilate(img, kernel) return image def image_erode(img,", "# ) # names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') # for name in", "= np.asarray(np.squeeze(mhd_image), np.float32) mhd_images.append(mhd_image) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) if phase_name", "ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0] + '_ROI.jpg')) del image, ROI_Image gc.collect() def compress22dim(image):", "0 # pre_slice.append(volume[:, :, z]) if (i - 1) <", "[] for phase_name in phasenames: mhd_image_path = glob(os.path.join(slice_dir, 'Image_%s*.mhd' %", "(-350) image -= minval # scale down to 0 -", "1.0: neg_rate = 1.0 for i in range(channel): seg_slice =", "dataset_name, file_name+'.' 
+ suffix_name) if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape", "next_start = i + slice_num / 2 # next_start =", "= read_nil(path) # print(np.shape(image)) # conver2JPG single phase # image_dir", "# zoom desired_size = [size_training, size_training] desired_size = np.asarray(desired_size, dtype=np.int)", "image_arr[image_arr > 180] = 180 image_arr = image_arr + 70", "compress22dim(image): ''' 将一个矩阵如果可能,压缩到三维的空间 ''' shape = list(np.shape(image)) if len(shape) ==", "weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8) for idx in range(1, np.max(labeled_mask) +", ":, :] print('the mask image shape is ', np.shape(mask_image)) if", "return idname_dict[typeid] # 根据病灶类型的name返回id的字符串 def return_typeid_byname(typename): nameid_dict = return_type_nameid() return" ]
[ "License', 'Programming Language :: Python :: 3.5', ], keywords='zoho, API,", "3.5', ], keywords='zoho, API, zoho project', url='https://github.com/marcus-luck/zohoreader', author='<NAME>', author_email='<EMAIL>', license='MIT',", "import setup def readme(): with open('README.rst') as f: return f.read()", "zoho projects API to get all projects, users and timereports',", "Status :: 3 - Alpha', 'License :: OSI Approved ::", "author='<NAME>', author_email='<EMAIL>', license='MIT', packages=['zohoreader'], zip_safe=False, install_requires=[ 'requests>=2.12.4', 'python-dateutil>=2.7.2' ], test_suite='nose.collector',", "readme(): with open('README.rst') as f: return f.read() setup(name='zohoreader', version='0.1', description='A", "API, zoho project', url='https://github.com/marcus-luck/zohoreader', author='<NAME>', author_email='<EMAIL>', license='MIT', packages=['zohoreader'], zip_safe=False, install_requires=[", "long_description=readme(), classifiers=[ 'Development Status :: 3 - Alpha', 'License ::", "classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI", "author_email='<EMAIL>', license='MIT', packages=['zohoreader'], zip_safe=False, install_requires=[ 'requests>=2.12.4', 'python-dateutil>=2.7.2' ], test_suite='nose.collector', tests_require=['nose',", "project', url='https://github.com/marcus-luck/zohoreader', author='<NAME>', author_email='<EMAIL>', license='MIT', packages=['zohoreader'], zip_safe=False, install_requires=[ 'requests>=2.12.4', 'python-dateutil>=2.7.2'", "setuptools import setup def readme(): with open('README.rst') as f: return", "as f: return f.read() setup(name='zohoreader', version='0.1', description='A simple reader for", "f.read() setup(name='zohoreader', version='0.1', description='A simple reader for zoho projects API", "keywords='zoho, API, zoho project', url='https://github.com/marcus-luck/zohoreader', author='<NAME>', author_email='<EMAIL>', license='MIT', packages=['zohoreader'], zip_safe=False,", "timereports', 
long_description=readme(), classifiers=[ 'Development Status :: 3 - Alpha', 'License", "users and timereports', long_description=readme(), classifiers=[ 'Development Status :: 3 -", "Language :: Python :: 3.5', ], keywords='zoho, API, zoho project',", "Python :: 3.5', ], keywords='zoho, API, zoho project', url='https://github.com/marcus-luck/zohoreader', author='<NAME>',", ":: MIT License', 'Programming Language :: Python :: 3.5', ],", "MIT License', 'Programming Language :: Python :: 3.5', ], keywords='zoho,", "from setuptools import setup def readme(): with open('README.rst') as f:", "open('README.rst') as f: return f.read() setup(name='zohoreader', version='0.1', description='A simple reader", "'License :: OSI Approved :: MIT License', 'Programming Language ::", "OSI Approved :: MIT License', 'Programming Language :: Python ::", "zoho project', url='https://github.com/marcus-luck/zohoreader', author='<NAME>', author_email='<EMAIL>', license='MIT', packages=['zohoreader'], zip_safe=False, install_requires=[ 'requests>=2.12.4',", "def readme(): with open('README.rst') as f: return f.read() setup(name='zohoreader', version='0.1',", "reader for zoho projects API to get all projects, users", "'Programming Language :: Python :: 3.5', ], keywords='zoho, API, zoho", "for zoho projects API to get all projects, users and", "simple reader for zoho projects API to get all projects,", "Approved :: MIT License', 'Programming Language :: Python :: 3.5',", "], keywords='zoho, API, zoho project', url='https://github.com/marcus-luck/zohoreader', author='<NAME>', author_email='<EMAIL>', license='MIT', packages=['zohoreader'],", "f: return f.read() setup(name='zohoreader', version='0.1', description='A simple reader for zoho", "version='0.1', description='A simple reader for zoho projects API to get", "3 - Alpha', 'License :: OSI Approved :: MIT License',", ":: 3.5', ], keywords='zoho, API, zoho project', url='https://github.com/marcus-luck/zohoreader', author='<NAME>', 
author_email='<EMAIL>',", "get all projects, users and timereports', long_description=readme(), classifiers=[ 'Development Status", "projects, users and timereports', long_description=readme(), classifiers=[ 'Development Status :: 3", ":: OSI Approved :: MIT License', 'Programming Language :: Python", "zip_safe=False, install_requires=[ 'requests>=2.12.4', 'python-dateutil>=2.7.2' ], test_suite='nose.collector', tests_require=['nose', 'nose-cover3'], include_package_data=True )", "setup def readme(): with open('README.rst') as f: return f.read() setup(name='zohoreader',", "with open('README.rst') as f: return f.read() setup(name='zohoreader', version='0.1', description='A simple", "all projects, users and timereports', long_description=readme(), classifiers=[ 'Development Status ::", "and timereports', long_description=readme(), classifiers=[ 'Development Status :: 3 - Alpha',", "packages=['zohoreader'], zip_safe=False, install_requires=[ 'requests>=2.12.4', 'python-dateutil>=2.7.2' ], test_suite='nose.collector', tests_require=['nose', 'nose-cover3'], include_package_data=True", "to get all projects, users and timereports', long_description=readme(), classifiers=[ 'Development", "'Development Status :: 3 - Alpha', 'License :: OSI Approved", "return f.read() setup(name='zohoreader', version='0.1', description='A simple reader for zoho projects", "description='A simple reader for zoho projects API to get all", "- Alpha', 'License :: OSI Approved :: MIT License', 'Programming", "Alpha', 'License :: OSI Approved :: MIT License', 'Programming Language", ":: Python :: 3.5', ], keywords='zoho, API, zoho project', url='https://github.com/marcus-luck/zohoreader',", "API to get all projects, users and timereports', long_description=readme(), classifiers=[", ":: 3 - Alpha', 'License :: OSI Approved :: MIT", "url='https://github.com/marcus-luck/zohoreader', author='<NAME>', author_email='<EMAIL>', license='MIT', packages=['zohoreader'], zip_safe=False, install_requires=[ 
'requests>=2.12.4', 'python-dateutil>=2.7.2' ],", "license='MIT', packages=['zohoreader'], zip_safe=False, install_requires=[ 'requests>=2.12.4', 'python-dateutil>=2.7.2' ], test_suite='nose.collector', tests_require=['nose', 'nose-cover3'],", "setup(name='zohoreader', version='0.1', description='A simple reader for zoho projects API to", "projects API to get all projects, users and timereports', long_description=readme()," ]
[ "def org_seleniumhq_py(): http_archive( name = \"org_seleniumhq_py\", build_file = str(Label(\"//build_files:org_seleniumhq_py.BUILD\")), sha256", "\"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\", \"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\", ], ) def com_github_tebeka_selenium(): go_repository( name = \"com_github_tebeka_selenium\",", "repository. The args dict will be mutated to remove \"omit_\"", "2.0 (the \"License\"); # you may not use this file", ") def org_apache_commons_exec(): java_import_external( name = \"org_apache_commons_exec\", jar_sha256 = \"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\",", "= [ \"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\", ], ) def org_seleniumhq_selenium_api(): java_import_external( name =", "# SauceLabs EULA amd64_sha256 = \"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\", amd64_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\",", "kwargs): com_github_blang_semver() if should_create_repository(\"com_github_gorilla_context\", kwargs): com_github_gorilla_context() if should_create_repository(\"com_github_gorilla_mux\", kwargs): com_github_gorilla_mux()", "bool pairs. Returns: boolean indicating whether the repository should be", "bazel_skylib() if should_create_repository(\"com_github_blang_semver\", kwargs): com_github_blang_semver() if should_create_repository(\"com_github_gorilla_context\", kwargs): com_github_gorilla_context() if", "repository to be disabled by either an \"omit_\" _+ name", "# The Apache Software License, Version 2.0 ) def org_json():", "\"\"\"Defines external repositories required by Webtesting Rules. 
This function exists", "should_create_repository(\"com_google_code_findbugs_jsr305\", kwargs): com_google_code_findbugs_jsr305() if should_create_repository(\"com_google_code_gson\", kwargs): com_google_code_gson() if should_create_repository( \"com_google_errorprone_error_prone_annotations\",", "java_import_external( name = \"com_squareup_okio\", jar_sha256 = \"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\", jar_urls = [", "= [\"notice\"], # Apache License, Version 2.0 ) def org_apache_httpcomponents_httpclient():", "\"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\", urls = [ \"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", \"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", ], ) def com_github_blang_semver():", "\"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", ], licenses = [\"notice\"], # Apache License, Version", "= \"org_jetbrains_kotlin_stdlib\", jar_sha256 = \"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", \"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\",", "= \"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\", macos_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\", ], windows_sha256 = \"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\",", "be used on an experimental basis; projects should define their", "\"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\", jar_urls = [ 
\"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", \"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", ], licenses = [\"notice\"],", "\"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", \"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", ], licenses = [\"notice\"],", "sha256 = \"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\", strip_prefix = \"mux-1.6.2\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\",", "java_import_external( name = \"com_google_errorprone_error_prone_annotations\", jar_sha256 = \"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\", jar_urls = [", "used on an experimental basis; projects should define their own", "\"com_google_guava\", jar_sha256 = \"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\", \"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\", ],", "[ \"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\", ], macos_sha256 = \"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\", macos_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\",", "are unknown: \" + str(kwargs.keys())) def should_create_repository(name, args): \"\"\"Returns whether", "= [\"notice\"], # Apache 2.0 exports = [ \"@com_google_code_findbugs_jsr305\", \"@com_google_errorprone_error_prone_annotations\",", "[ \"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\", ], ) def org_chromium_chromium(): platform_http_file( name = \"org_chromium_chromium\",", "dict will be mutated to remove \"omit_\" + name. 
Args:", "\"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", \"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", ], licenses = [\"reciprocal\"],", "= \"org_mozilla_firefox\", licenses = [\"reciprocal\"], # MPL 2.0 amd64_sha256 =", "about declaring their own direct dependencies, or when another Bazel", "License for the specific language governing permissions and # limitations", "= [\"reciprocal\"], # MPL 2.0 amd64_sha256 = \"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\", amd64_urls =", "may be used by calling the individual functions this method", "[ \"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", \"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", ], ) def com_github_blang_semver(): go_repository( name =", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", \"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", ], licenses = [\"notice\"], #", "BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License", "Configure repositories for //browser/sauce:chrome-win10. \"\"\" if chromium: org_chromium_chromedriver() org_chromium_chromium() if", "strip_prefix = \"semver-3.5.1\", urls = [ \"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\", \"https://github.com/blang/semver/archive/v3.5.1.tar.gz\", ], )", "basis; projects should define their own browsers. Args: firefox: Configure", "whether the repository should be created. \"\"\" key = \"omit_\"", "for //browsers:firefox-native. chromium: Configure repositories for //browsers:chromium-native. 
sauce: Configure repositories", "Apache 2.0 exports = [ \"@com_google_code_findbugs_jsr305\", \"@com_google_errorprone_error_prone_annotations\", ], ) def", "java_import_external( name = \"commons_logging\", jar_sha256 = \"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\", jar_urls = [", "repositories for //browsers:chromium-native. sauce: Configure repositories for //browser/sauce:chrome-win10. \"\"\" if", "parameters used to prevent importing specific dependencies. \"\"\" if should_create_repository(\"bazel_skylib\",", "Apache 2.0 deps = [ \"@com_squareup_okio\", \"@com_google_code_findbugs_jsr305\", ], ) def", "2.0 ) def com_google_guava(): java_import_external( name = \"com_google_guava\", jar_sha256 =", "org_apache_httpcomponents_httpcore(): java_import_external( name = \"org_apache_httpcomponents_httpcore\", jar_sha256 = \"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\", jar_urls =", "= [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\", ], windows_sha256 = \"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\", windows_urls = [", "\"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\", amd64_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", ], macos_sha256 = \"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\",", "rules_closure) that defines the same dependencies as this one (e.g.", "kwargs): commons_codec() if should_create_repository(\"commons_logging\", kwargs): commons_logging() if should_create_repository(\"junit\", kwargs): junit()", "1, ) def org_seleniumhq_selenium_remote_driver(): java_import_external( name = \"org_seleniumhq_selenium_remote_driver\", jar_sha256 =", "= \"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\", amd64_urls = 
[ \"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\", ], macos_sha256 = \"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\",", "com_github_gorilla_context() if should_create_repository(\"com_github_gorilla_mux\", kwargs): com_github_gorilla_mux() if should_create_repository(\"com_github_tebeka_selenium\", kwargs): com_github_tebeka_selenium() if", "Version 2.0 ) def org_apache_httpcomponents_httpclient(): java_import_external( name = \"org_apache_httpcomponents_httpclient\", jar_sha256", "[ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", ], ) def org_mozilla_geckodriver(): platform_http_file( name =", "should only be used on an experimental basis; projects should", "\"github.com/gorilla/mux\", sha256 = \"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\", strip_prefix = \"mux-1.6.2\", urls = [", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", \"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", ], licenses = [\"notice\"], # MIT-style", "review process. They must # be greppable for that to", "their own direct dependencies, or when another Bazel project is", "com_google_errorprone_error_prone_annotations() if should_create_repository(\"com_google_guava\", kwargs): com_google_guava() if should_create_repository(\"com_squareup_okhttp3_okhttp\", kwargs): com_squareup_okhttp3_okhttp() if", "def net_bytebuddy(): java_import_external( name = \"net_bytebuddy\", jar_sha256 = \"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\", jar_urls", "<gh_stars>0 # Copyright 2016 Google Inc. # # Licensed under", "OF ANY KIND, either express or implied. 
# See the", "\"org_chromium_chromedriver\", licenses = [\"reciprocal\"], # BSD 3-clause, ICU, MPL 1.1,", "com_github_tebeka_selenium(): go_repository( name = \"com_github_tebeka_selenium\", importpath = \"github.com/tebeka/selenium\", sha256 =", "See the License for the specific language governing permissions and", "def org_json(): java_import_external( name = \"org_json\", jar_sha256 = \"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\", jar_urls", "defined before defining a new repository. Alternatively, individual dependencies may", "= [\"notice\"], # The Apache Software License, Version 2.0 testonly_", "to in writing, software # distributed under the License is", "parameters are unknown: \" + str(kwargs.keys())) def should_create_repository(name, args): \"\"\"Returns", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", \"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", ], licenses = [\"notice\"], # MIT-style license", "= [ \"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\", ], ) def org_chromium_chromium(): platform_http_file( name =", "should_create_repository(\"com_squareup_okhttp3_okhttp\", kwargs): com_squareup_okhttp3_okhttp() if should_create_repository(\"com_squareup_okio\", kwargs): com_squareup_okio() if should_create_repository(\"commons_codec\", kwargs):", "should_create_repository(\"junit\", kwargs): junit() if should_create_repository(\"net_bytebuddy\", kwargs): net_bytebuddy() if should_create_repository(\"org_apache_commons_exec\", kwargs):", "or agreed to in writing, software # distributed under the", "\"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", ], ) def org_seleniumhq_py(): http_archive( name = \"org_seleniumhq_py\", build_file", "should_create_repository(\"com_github_tebeka_selenium\", kwargs): com_github_tebeka_selenium() if 
should_create_repository(\"com_github_urllib3\", kwargs): com_github_urllib3() if should_create_repository(\"com_google_code_findbugs_jsr305\", kwargs):", "if should_create_repository(\"junit\", kwargs): junit() if should_create_repository(\"net_bytebuddy\", kwargs): net_bytebuddy() if should_create_repository(\"org_apache_commons_exec\",", "kwargs): org_apache_httpcomponents_httpclient() if should_create_repository(\"org_apache_httpcomponents_httpcore\", kwargs): org_apache_httpcomponents_httpcore() if should_create_repository(\"org_hamcrest_core\", kwargs): org_hamcrest_core()", "\"urllib3-1.23\", urls = [ \"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\", ], ) def com_google_code_findbugs_jsr305(): java_import_external(", "kwargs): org_apache_commons_exec() if should_create_repository(\"org_apache_httpcomponents_httpclient\", kwargs): org_apache_httpcomponents_httpclient() if should_create_repository(\"org_apache_httpcomponents_httpcore\", kwargs): org_apache_httpcomponents_httpcore()", "should_create_repository(\"org_apache_httpcomponents_httpclient\", kwargs): org_apache_httpcomponents_httpclient() if should_create_repository(\"org_apache_httpcomponents_httpcore\", kwargs): org_apache_httpcomponents_httpcore() if should_create_repository(\"org_hamcrest_core\", kwargs):", "repository. Alternatively, individual dependencies may be excluded with an \"omit_\"", "], windows_sha256 = \"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\", windows_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\", ], )", "browsers. Args: firefox: Configure repositories for //browsers:firefox-native. chromium: Configure repositories", "fast failover. def web_test_repositories(**kwargs): \"\"\"Defines external repositories required by Webtesting", "compliance with the License. 
# You may obtain a copy", "= \"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\", urls = [ \"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", \"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", ], ) def", "= \"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", \"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", ], licenses =", "args dict will be mutated to remove \"omit_\" + name.", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", \"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", ], licenses = [\"reciprocal\"], #", "following parameters are unknown: \" + str(kwargs.keys())) def should_create_repository(name, args):", "= [\"notice\"], # New BSD License testonly_ = 1, )", "name = \"commons_codec\", jar_sha256 = \"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\",", "2.0 ) def org_chromium_chromedriver(): platform_http_file( name = \"org_chromium_chromedriver\", licenses =", "Apache Software License, Version 2.0 ) def com_google_errorprone_error_prone_annotations(): java_import_external( name", "jar_sha256 = \"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", \"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", ], licenses", "not use this file except in compliance with the License.", "= \"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\", jar_urls = [ 
\"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", \"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", ], licenses =", "\"com_github_urllib3\", build_file = str(Label(\"//build_files:com_github_urllib3.BUILD\")), sha256 = \"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\", strip_prefix = \"urllib3-1.23\",", "you may not use this file except in compliance with", "under the License. \"\"\"Defines external repositories needed by rules_webtesting.\"\"\" load(\"//web/internal:platform_http_file.bzl\",", "org_mozilla_firefox(): platform_http_file( name = \"org_mozilla_firefox\", licenses = [\"reciprocal\"], # MPL", "\"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", ], jar_sha256 = \"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\", licenses = [\"notice\"], # BSD", "Bazel projects to call from their WORKSPACE file when depending", "= \"com_squareup_okhttp3_okhttp\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", \"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", ], jar_sha256 =", "\"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\", ], ) def org_hamcrest_core(): java_import_external( name = \"org_hamcrest_core\", jar_sha256", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", \"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", ], licenses = [\"notice\"], # Apache", "# be greppable for that to happen. 
It's OK to", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", \"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", ], licenses = [\"notice\"], #", "\"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\", ], windows_sha256 = \"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\", windows_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\", ],", "for users who want to be rigorous about declaring their", "def com_squareup_okhttp3_okhttp(): java_import_external( name = \"com_squareup_okhttp3_okhttp\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\",", "= \"selenium-3.14.0\", urls = [ \"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\", ], ) def org_seleniumhq_selenium_api():", "name = \"org_jetbrains_kotlin_stdlib\", jar_sha256 = \"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\",", "args.pop(key) if val: return False if native.existing_rule(name): return False return", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "macos_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\", ], windows_sha256 = \"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\", windows_urls =", "\"\"\" if should_create_repository(\"bazel_skylib\", kwargs): bazel_skylib() if should_create_repository(\"com_github_blang_semver\", kwargs): com_github_blang_semver() if", "print(\"The following parameters are unknown: \" + str(kwargs.keys())) def should_create_repository(name,", "Software License, Version 2.0 testonly_ = 1, deps = [", 
"[\"notice\"], # Apache 2.0 exports = [ \"@com_google_code_findbugs_jsr305\", \"@com_google_errorprone_error_prone_annotations\", ],", "parameter. This is useful for users who want to be", "that while these dependencies are defined, they are not actually", "= \"com_github_gorilla_mux\", importpath = \"github.com/gorilla/mux\", sha256 = \"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\", strip_prefix =", "jar_sha256 = \"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\", \"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\", ], licenses", "correctly formatted. Bazel's downloader # has fast failover. def web_test_repositories(**kwargs):", "own direct dependencies, or when another Bazel project is depended", "[ \"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\", \"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\", ], ) def com_github_tebeka_selenium(): go_repository( name =", "firefox: org_mozilla_firefox() org_mozilla_geckodriver() if sauce: com_saucelabs_sauce_connect() def bazel_skylib(): http_archive( name", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", \"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", ], licenses = [\"notice\"], # The", "\"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\", strip_prefix = \"selenium-3.14.0\", urls = [ \"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\", ], )", "creation of a repository to be disabled by either an", "Apache Software License, Version 2.0 testonly_ = 1, deps =", "name = \"org_json\", jar_sha256 = \"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\", jar_urls = [ 
\"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\",", "name = \"org_hamcrest_core\", jar_sha256 = \"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\",", "before defining a new repository. Alternatively, individual dependencies may be", "[ \"@com_google_code_findbugs_jsr305\", \"@org_jetbrains_kotlin_stdlib\", ], ) def commons_codec(): java_import_external( name =", "[\"notice\"], # MIT-style license ) def org_mozilla_firefox(): platform_http_file( name =", "= \"commons_codec\", jar_sha256 = \"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", \"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\",", "def junit(): java_import_external( name = \"junit\", jar_sha256 = \"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\", jar_urls", "previously defining a rule for the repository. 
The args dict", "False if native.existing_rule(name): return False return True def browser_repositories(firefox =", "= [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\", ], macos_sha256 = \"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\", macos_urls = [", "\"@com_google_code_findbugs_jsr305\", \"@org_jetbrains_kotlin_stdlib\", ], ) def commons_codec(): java_import_external( name = \"commons_codec\",", "\"omit_\" _+ name parameter or by previously defining a rule", "= [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\", ], windows_sha256 = \"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\", windows_urls = [", "should_create_repository(\"org_seleniumhq_selenium_remote_driver\", kwargs): org_seleniumhq_selenium_remote_driver() if kwargs.keys(): print(\"The following parameters are unknown:", "should_create_repository(\"net_bytebuddy\", kwargs): net_bytebuddy() if should_create_repository(\"org_apache_commons_exec\", kwargs): org_apache_commons_exec() if should_create_repository(\"org_apache_httpcomponents_httpclient\", kwargs):", "//browser/sauce:chrome-win10. \"\"\" if chromium: org_chromium_chromedriver() org_chromium_chromium() if firefox: org_mozilla_firefox() org_mozilla_geckodriver()", "remove \"omit_\" + name. Args: name: The name of the", "def com_squareup_okio(): java_import_external( name = \"com_squareup_okio\", jar_sha256 = \"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\", jar_urls", "defines the same dependencies as this one (e.g. com_google_guava.) Alternatively,", "dependencies into the parent workspace. 
This will check to see", "\"org_hamcrest_core\", jar_sha256 = \"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", \"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", ],", "\"@com_google_errorprone_error_prone_annotations\", ], ) def com_saucelabs_sauce_connect(): platform_http_file( name = \"com_saucelabs_sauce_connect\", licenses", "defining a new repository. Alternatively, individual dependencies may be excluded", "License v. 2.0, BSD 2-clause, MIT amd64_sha256 = \"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\", amd64_urls", "= \"org_json\", jar_sha256 = \"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", \"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\",", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "mirrored by an asynchronous review process. 
They must # be", "to see if a repository has been previously defined before", "= [ \"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", \"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", ], ) def com_github_urllib3(): http_archive( name", "licenses = [\"by_exception_only\"], # SauceLabs EULA amd64_sha256 = \"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\", amd64_urls", "java_import_external( name = \"org_seleniumhq_selenium_remote_driver\", jar_sha256 = \"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\", jar_urls = [", ") def com_squareup_okhttp3_okhttp(): java_import_external( name = \"com_squareup_okhttp3_okhttp\", jar_urls = [", "\"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\", macos_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\", ], windows_sha256 = \"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\", windows_urls", "java_import_external( name = \"com_squareup_okhttp3_okhttp\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", \"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", ],", "should_create_repository(\"org_hamcrest_core\", kwargs): org_hamcrest_core() if should_create_repository(\"org_jetbrains_kotlin_stdlib\", kwargs): org_jetbrains_kotlin_stdlib() if should_create_repository(\"org_json\", kwargs):", "these transitive dependencies into the parent workspace. 
This will check", "parameter or by previously defining a rule for the repository.", "if native.existing_rule(name): return False return True def browser_repositories(firefox = False,", "[ \"@com_google_code_findbugs_jsr305\", \"@com_google_errorprone_error_prone_annotations\", ], ) def com_saucelabs_sauce_connect(): platform_http_file( name =", "these dependencies are defined, they are not actually downloaded, unless", "\"commons_codec\", jar_sha256 = \"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", \"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", ],", "for other Bazel projects to call from their WORKSPACE file", "Alternatively, a whitelist model may be used by calling the", "\"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", ], licenses = [\"notice\"], # Apache License, Version 2.0", "used to prevent importing specific dependencies. \"\"\" if should_create_repository(\"bazel_skylib\", kwargs):", "name: The name of the repository that should be checked.", "file except in compliance with the License. # You may", "so long as they're correctly formatted. Bazel's downloader # has", "= [\"notice\"], # Apache 2.0 deps = [ \"@com_squareup_okio\", \"@com_google_code_findbugs_jsr305\",", "depends on them. Args: **kwargs: omit_... parameters used to prevent", "ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 
2.0,", "deps = [ \"@com_google_code_gson\", \"@com_google_guava\", \"@net_bytebuddy\", \"@com_squareup_okhttp3_okhttp\", \"@com_squareup_okio\", \"@commons_codec\", \"@commons_logging\",", "unknown: \" + str(kwargs.keys())) def should_create_repository(name, args): \"\"\"Returns whether the", "\"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", ], ) def com_github_urllib3(): http_archive( name = \"com_github_urllib3\", build_file", "= \"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\", licenses = [\"notice\"], # BSD 3-clause ) def", "+ name. Args: name: The name of the repository that", "should_create_repository(\"org_seleniumhq_py\", kwargs): org_seleniumhq_py() if should_create_repository(\"org_seleniumhq_selenium_api\", kwargs): org_seleniumhq_selenium_api() if should_create_repository(\"org_seleniumhq_selenium_remote_driver\", kwargs):", "licenses = [\"notice\"], # Apache 2.0 deps = [\"@com_google_code_findbugs_jsr305\"], )", "], ) def com_github_gorilla_context(): go_repository( name = \"com_github_gorilla_context\", importpath =", "], ) def org_mozilla_geckodriver(): platform_http_file( name = \"org_mozilla_geckodriver\", licenses =", "= \"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\", macos_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\", ], windows_sha256 = \"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\",", "importing specific dependencies. \"\"\" if should_create_repository(\"bazel_skylib\", kwargs): bazel_skylib() if should_create_repository(\"com_github_blang_semver\",", "= [\"notice\"], # Apache 2.0 ) def com_google_guava(): java_import_external( name", "name = \"com_github_urllib3\", build_file = str(Label(\"//build_files:com_github_urllib3.BUILD\")), sha256 = \"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\", strip_prefix", "prevent importing specific dependencies. 
\"\"\" if should_create_repository(\"bazel_skylib\", kwargs): bazel_skylib() if", "to remove \"omit_\" + name. Args: name: The name of", "method references. Please note that while these dependencies are defined,", "urls = [ \"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\", ], ) def com_google_code_findbugs_jsr305(): java_import_external( name", "amd64_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\", ], macos_sha256 = \"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\", macos_urls =", "repositories for browsers defined in //browsers/.... This should only be", "java_import_external( name = \"org_json\", jar_sha256 = \"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\", jar_urls = [", "windows_sha256 = \"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\", windows_urls = [ \"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\", ], ) def", "sha256 = \"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\", strip_prefix = \"selenium-3.14.0\", urls = [ \"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\",", "KIND, either express or implied. # See the License for", "function makes it easy to import these transitive dependencies into", "calling the individual functions this method references. Please note that", "= \"com_github_gorilla_context\", importpath = \"github.com/gorilla/context\", sha256 = \"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\", strip_prefix =", "of the repository that should be checked. args: A dictionary", "\"omit_\" + name if key in args: val = args.pop(key)", "[\"notice\"], # The Apache Software License, Version 2.0 testonly_ =", "be created. \"\"\" key = \"omit_\" + name if key", "Google Inc. 
# # Licensed under the Apache License, Version", "(the \"License\"); # you may not use this file except", "com_google_guava(): java_import_external( name = \"com_google_guava\", jar_sha256 = \"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\", jar_urls =", "[\"notice\"], # Apache License, Version 2.0 ) def org_apache_httpcomponents_httpclient(): java_import_external(", "1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause,", "v. 2.0, BSD 2-clause, MIT amd64_sha256 = \"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\", amd64_urls =", "= [\"notice\"], # Apache License, Version 2.0 deps = [", "repositories for //browsers:firefox-native. chromium: Configure repositories for //browsers:chromium-native. sauce: Configure", "Please note that while these dependencies are defined, they are", "\"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", ], licenses = [\"notice\"], # The Apache Software License,", "License, Version 2.0 ) def org_apache_httpcomponents_httpclient(): java_import_external( name = \"org_apache_httpcomponents_httpclient\",", "This function exists for other Bazel projects to call from", "= [ \"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\", ], macos_sha256 = \"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\", macos_urls = [", "macos_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\", ], windows_sha256 = \"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\", windows_urls =", "licenses = [\"reciprocal\"], # MPL 2.0 amd64_sha256 = \"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\", amd64_urls", "\"com_google_errorprone_error_prone_annotations\", kwargs, ): com_google_errorprone_error_prone_annotations() if should_create_repository(\"com_google_guava\", kwargs): com_google_guava() if should_create_repository(\"com_squareup_okhttp3_okhttp\",", "kwargs): 
com_github_tebeka_selenium() if should_create_repository(\"com_github_urllib3\", kwargs): com_github_urllib3() if should_create_repository(\"com_google_code_findbugs_jsr305\", kwargs): com_google_code_findbugs_jsr305()", "], licenses = [\"notice\"], # The Apache Software License, Version", "platform_http_file( name = \"org_chromium_chromium\", licenses = [\"notice\"], # BSD 3-clause", "# # Unless required by applicable law or agreed to", "allows creation of a repository to be disabled by either", "\"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\", urls = [ \"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", \"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", ], ) def com_github_urllib3():", "1.0 testonly_ = 1, deps = [\"@org_hamcrest_core\"], ) def net_bytebuddy():", "amd64_sha256 = \"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\", amd64_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", ], macos_sha256", "strip_prefix = \"context-1.1.1\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\", \"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\", ], )", "go_repository( name = \"com_github_gorilla_mux\", importpath = \"github.com/gorilla/mux\", sha256 = \"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\",", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", ], licenses = [\"notice\"], # Apache License,", "other Bazel projects to call from their WORKSPACE file when", "org_apache_commons_exec(): java_import_external( name = 
\"org_apache_commons_exec\", jar_sha256 = \"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\", jar_urls =", "\"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", \"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", ], licenses = [\"notice\"], # MIT-style license )", "\"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\", ], licenses = [\"notice\"], # Apache 2.0 exports =", "MIT-style license ) def org_mozilla_firefox(): platform_http_file( name = \"org_mozilla_firefox\", licenses", "\"junit\", jar_sha256 = \"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", \"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", ],", "com_saucelabs_sauce_connect() def bazel_skylib(): http_archive( name = \"bazel_skylib\", sha256 = \"\",", "implied. # See the License for the specific language governing", "[\"reciprocal\"], # MPL 2.0 amd64_sha256 = \"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\", amd64_urls = [", "= [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", ], ) def org_seleniumhq_py(): http_archive( name", "= \"org_seleniumhq_py\", build_file = str(Label(\"//build_files:org_seleniumhq_py.BUILD\")), sha256 = \"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\", strip_prefix =", "repositories required by Webtesting Rules. 
This function exists for other", "= \"\", strip_prefix = \"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\", urls = [ \"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", \"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\",", "= \"github.com/tebeka/selenium\", sha256 = \"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\", strip_prefix = \"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\", urls =", "def com_github_blang_semver(): go_repository( name = \"com_github_blang_semver\", importpath = \"github.com/blang/semver\", sha256", "should_create_repository(\"org_apache_commons_exec\", kwargs): org_apache_commons_exec() if should_create_repository(\"org_apache_httpcomponents_httpclient\", kwargs): org_apache_httpcomponents_httpclient() if should_create_repository(\"org_apache_httpcomponents_httpcore\", kwargs):", "\"\"\"Defines external repositories needed by rules_webtesting.\"\"\" load(\"//web/internal:platform_http_file.bzl\", \"platform_http_file\") load(\"@bazel_gazelle//:deps.bzl\", \"go_repository\")", ") def com_github_gorilla_mux(): go_repository( name = \"com_github_gorilla_mux\", importpath = \"github.com/gorilla/mux\",", "org_seleniumhq_selenium_remote_driver(): java_import_external( name = \"org_seleniumhq_selenium_remote_driver\", jar_sha256 = \"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\", jar_urls =", "bazel_skylib(): http_archive( name = \"bazel_skylib\", sha256 = \"\", strip_prefix =", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", \"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", ], licenses = [\"notice\"], #", "java_import_external( name = \"org_apache_httpcomponents_httpcore\", jar_sha256 = \"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\", jar_urls 
= [", "= \"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\", strip_prefix = \"context-1.1.1\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\", \"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\",", "easy to import these transitive dependencies into the parent workspace.", "if a repository has been previously defined before defining a", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", ], licenses = [\"notice\"], #", "NOTE: URLs are mirrored by an asynchronous review process. They", ") def org_chromium_chromium(): platform_http_file( name = \"org_chromium_chromium\", licenses = [\"notice\"],", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", \"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", ], jar_sha256 = \"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\", licenses", "want to be rigorous about declaring their own direct dependencies,", "org_json(): java_import_external( name = \"org_json\", jar_sha256 = \"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\", jar_urls =", "= 1, deps = [ \"@com_google_code_gson\", \"@com_google_guava\", \"@net_bytebuddy\", \"@com_squareup_okhttp3_okhttp\", \"@com_squareup_okio\",", "kwargs): com_google_code_gson() if should_create_repository( \"com_google_errorprone_error_prone_annotations\", kwargs, ): com_google_errorprone_error_prone_annotations() if should_create_repository(\"com_google_guava\",", "if should_create_repository(\"com_github_urllib3\", kwargs): com_github_urllib3() if should_create_repository(\"com_google_code_findbugs_jsr305\", kwargs): com_google_code_findbugs_jsr305() if 
should_create_repository(\"com_google_code_gson\",", "Unless required by applicable law or agreed to in writing,", "should_create_repository( \"com_google_errorprone_error_prone_annotations\", kwargs, ): com_google_errorprone_error_prone_annotations() if should_create_repository(\"com_google_guava\", kwargs): com_google_guava() if", "org_seleniumhq_py() if should_create_repository(\"org_seleniumhq_selenium_api\", kwargs): org_seleniumhq_selenium_api() if should_create_repository(\"org_seleniumhq_selenium_remote_driver\", kwargs): org_seleniumhq_selenium_remote_driver() if", "Apache Software License, Version 2.0 testonly_ = 1, ) def", "should_create_repository(\"commons_logging\", kwargs): commons_logging() if should_create_repository(\"junit\", kwargs): junit() if should_create_repository(\"net_bytebuddy\", kwargs):", "the specific language governing permissions and # limitations under the", "= \"junit\", jar_sha256 = \"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", \"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\",", "http_archive( name = \"org_seleniumhq_py\", build_file = str(Label(\"//build_files:org_seleniumhq_py.BUILD\")), sha256 = \"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\",", "in //browsers/.... This should only be used on an experimental", "to be disabled by either an \"omit_\" _+ name parameter", "], ) def com_github_urllib3(): http_archive( name = \"com_github_urllib3\", build_file =", "long as they're correctly formatted. 
Bazel's downloader # has fast", "License, Version 2.0 ) def org_json(): java_import_external( name = \"org_json\",", "[\"by_exception_only\"], # SauceLabs EULA amd64_sha256 = \"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\", amd64_urls = [", "if should_create_repository(\"com_github_gorilla_mux\", kwargs): com_github_gorilla_mux() if should_create_repository(\"com_github_tebeka_selenium\", kwargs): com_github_tebeka_selenium() if should_create_repository(\"com_github_urllib3\",", "define their own browsers. Args: firefox: Configure repositories for //browsers:firefox-native.", "= \"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\", licenses = [\"notice\"], # Apache 2.0 deps =", "], licenses = [\"notice\"], # Apache 2.0 deps = [", "\"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\", amd64_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\", ], macos_sha256 = \"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\", macos_urls", "if firefox: org_mozilla_firefox() org_mozilla_geckodriver() if sauce: com_saucelabs_sauce_connect() def bazel_skylib(): http_archive(", "be rigorous about declaring their own direct dependencies, or when", "str(Label(\"//build_files:org_seleniumhq_py.BUILD\")), sha256 = \"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\", strip_prefix = \"selenium-3.14.0\", urls = [", "[ \"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\", ], ) def com_google_code_findbugs_jsr305(): java_import_external( name = \"com_google_code_findbugs_jsr305\",", "3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v.", "[ \"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\", ], ) def com_squareup_okhttp3_okhttp(): java_import_external( name = \"com_squareup_okhttp3_okhttp\",", "2.0 deps = [ \"@com_google_code_findbugs_jsr305\", 
\"@org_jetbrains_kotlin_stdlib\", ], ) def commons_codec():", "= \"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", ], licenses =", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", \"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", ], licenses = [\"notice\"], # Apache", "name = \"org_mozilla_geckodriver\", licenses = [\"reciprocal\"], # MPL 2.0 amd64_sha256", "kwargs): com_google_guava() if should_create_repository(\"com_squareup_okhttp3_okhttp\", kwargs): com_squareup_okhttp3_okhttp() if should_create_repository(\"com_squareup_okio\", kwargs): com_squareup_okio()", "into the parent workspace. This will check to see if", "amd64_sha256 = \"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\", amd64_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\", ], macos_sha256 =", "org_seleniumhq_selenium_api() if should_create_repository(\"org_seleniumhq_selenium_remote_driver\", kwargs): org_seleniumhq_selenium_remote_driver() if kwargs.keys(): print(\"The following parameters", "(e.g. 
rules_closure) that defines the same dependencies as this one", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", \"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", ], licenses = [\"notice\"], # Apache", "def org_apache_commons_exec(): java_import_external( name = \"org_apache_commons_exec\", jar_sha256 = \"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\", jar_urls", "\"selenium-3.14.0\", urls = [ \"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\", ], ) def org_seleniumhq_selenium_api(): java_import_external(", "if key in args: val = args.pop(key) if val: return", "= \"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\", macos_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", ], ) def", "\"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", \"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", ], licenses = [\"notice\"], # Apache License, Version", "= \"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", \"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", ], licenses =", "should_create_repository(\"com_github_blang_semver\", kwargs): com_github_blang_semver() if should_create_repository(\"com_github_gorilla_context\", kwargs): com_github_gorilla_context() if should_create_repository(\"com_github_gorilla_mux\", kwargs):", "should define their own browsers. 
Args: firefox: Configure repositories for", "name = \"org_apache_httpcomponents_httpclient\", jar_sha256 = \"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\",", "[ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", ], macos_sha256 = \"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\", macos_urls = [", "\"com_github_blang_semver\", importpath = \"github.com/blang/semver\", sha256 = \"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\", strip_prefix = \"semver-3.5.1\",", "def com_github_urllib3(): http_archive( name = \"com_github_urllib3\", build_file = str(Label(\"//build_files:com_github_urllib3.BUILD\")), sha256", "on rules_webtesting using http_archive. This function makes it easy to", "urls = [ \"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\", \"https://github.com/blang/semver/archive/v3.5.1.tar.gz\", ], ) def com_github_gorilla_context(): go_repository(", "a new repository. 
Alternatively, individual dependencies may be excluded with", "\"org_apache_httpcomponents_httpclient\", jar_sha256 = \"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", ],", "val: return False if native.existing_rule(name): return False return True def", "be used by calling the individual functions this method references.", "[ \"@org_apache_httpcomponents_httpcore\", \"@commons_logging\", \"@commons_codec\", ], ) def org_apache_httpcomponents_httpcore(): java_import_external( name", "macos_sha256 = \"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\", macos_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\", ], windows_sha256 =", "[ \"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\", \"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\", ], ) def com_github_gorilla_mux(): go_repository( name =", ") def com_google_errorprone_error_prone_annotations(): java_import_external( name = \"com_google_errorprone_error_prone_annotations\", jar_sha256 = \"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\",", "= \"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\", windows_urls = [ \"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\", ], ) def com_squareup_okhttp3_okhttp():", "macos_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\", ], windows_sha256 = \"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\", windows_urls =", "macos_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", 
\"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", ], ) def org_seleniumhq_py(): http_archive(", "repository should be created. \"\"\" key = \"omit_\" + name", "= \"semver-3.5.1\", urls = [ \"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\", \"https://github.com/blang/semver/archive/v3.5.1.tar.gz\", ], ) def", "if should_create_repository(\"org_apache_httpcomponents_httpclient\", kwargs): org_apache_httpcomponents_httpclient() if should_create_repository(\"org_apache_httpcomponents_httpcore\", kwargs): org_apache_httpcomponents_httpcore() if should_create_repository(\"org_hamcrest_core\",", "name parameter or by previously defining a rule for the", "= \"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\", amd64_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\", ], macos_sha256 = \"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\",", "args: A dictionary that contains \"omit_...\": bool pairs. 
Returns: boolean", "kwargs): com_github_urllib3() if should_create_repository(\"com_google_code_findbugs_jsr305\", kwargs): com_google_code_findbugs_jsr305() if should_create_repository(\"com_google_code_gson\", kwargs): com_google_code_gson()", "The Apache Software License, Version 2.0 ) def junit(): java_import_external(", "\"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\", ], ) def com_google_code_findbugs_jsr305(): java_import_external( name = \"com_google_code_findbugs_jsr305\", jar_urls", "kwargs): com_squareup_okio() if should_create_repository(\"commons_codec\", kwargs): commons_codec() if should_create_repository(\"commons_logging\", kwargs): commons_logging()", "org_mozilla_firefox() org_mozilla_geckodriver() if sauce: com_saucelabs_sauce_connect() def bazel_skylib(): http_archive( name =", "Version 2.0 ) def org_chromium_chromedriver(): platform_http_file( name = \"org_chromium_chromedriver\", licenses", "def org_mozilla_geckodriver(): platform_http_file( name = \"org_mozilla_geckodriver\", licenses = [\"reciprocal\"], #", "You may obtain a copy of the License at #", "to submit broken mirror # URLs, so long as they're", "2.0 testonly_ = 1, deps = [ \"@com_google_code_gson\", \"@com_google_guava\", \"@net_bytebuddy\",", "License, Version 2.0 ) def junit(): java_import_external( name = \"junit\",", "org_chromium_chromedriver() org_chromium_chromium() if firefox: org_mozilla_firefox() org_mozilla_geckodriver() if sauce: com_saucelabs_sauce_connect() def", "Version 2.0 testonly_ = 1, ) def org_seleniumhq_selenium_remote_driver(): java_import_external( name", "be disabled by either an \"omit_\" _+ name parameter or", "], licenses = [\"notice\"], # Apache 2.0 deps = [\"@com_google_code_findbugs_jsr305\"],", "macos_sha256 = \"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\", macos_urls = [ 
\"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", ], )", "\"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\", macos_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", ], ) def org_mozilla_geckodriver():", "+ name if key in args: val = args.pop(key) if", "licenses = [\"notice\"], # Apache 2.0 deps = [ \"@com_squareup_okio\",", "//browsers:firefox-native. chromium: Configure repositories for //browsers:chromium-native. sauce: Configure repositories for", "= \"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", \"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", ], licenses =", "has fast failover. def web_test_repositories(**kwargs): \"\"\"Defines external repositories required by", "New BSD License testonly_ = 1, ) def org_jetbrains_kotlin_stdlib(): java_import_external(", "com_squareup_okio(): java_import_external( name = \"com_squareup_okio\", jar_sha256 = \"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\", jar_urls =", "name = \"org_apache_commons_exec\", jar_sha256 = \"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\",", "whether the name repository should be created. 
This allows creation", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "\"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\", strip_prefix = \"context-1.1.1\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\", \"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\", ],", "should be created. This allows creation of a repository to", "\"commons_logging\", jar_sha256 = \"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", \"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", ],", "def org_seleniumhq_selenium_api(): java_import_external( name = \"org_seleniumhq_selenium_api\", jar_sha256 = \"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\", jar_urls", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\", \"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\", ], licenses = [\"notice\"], # Apache 2.0", "should_create_repository(\"com_google_guava\", kwargs): com_google_guava() if should_create_repository(\"com_squareup_okhttp3_okhttp\", kwargs): com_squareup_okhttp3_okhttp() if should_create_repository(\"com_squareup_okio\", kwargs):", "depended upon (e.g. rules_closure) that defines the same dependencies as", "name = \"org_seleniumhq_selenium_api\", jar_sha256 = \"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\",", "using http_archive. 
This function makes it easy to import these", "\"mux-1.6.2\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\", \"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\", ], ) def com_github_tebeka_selenium():", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", \"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", ], licenses = [\"notice\"], #", "file when depending on rules_webtesting using http_archive. This function makes", "an \"omit_\" + name parameter. This is useful for users", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "should be checked. args: A dictionary that contains \"omit_...\": bool", "License. # You may obtain a copy of the License", "\"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", ], ) def org_mozilla_geckodriver(): platform_http_file( name = \"org_mozilla_geckodriver\",", "amd64_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", ], macos_sha256 = \"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\", macos_urls", "name = \"bazel_skylib\", sha256 = \"\", strip_prefix = \"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\", urls", "**kwargs: omit_... parameters used to prevent importing specific dependencies. 
\"\"\"", "org_apache_httpcomponents_httpcore() if should_create_repository(\"org_hamcrest_core\", kwargs): org_hamcrest_core() if should_create_repository(\"org_jetbrains_kotlin_stdlib\", kwargs): org_jetbrains_kotlin_stdlib() if", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", ], licenses = [\"notice\"], # The", "\"platform_http_file\") load(\"@bazel_gazelle//:deps.bzl\", \"go_repository\") load(\"@bazel_tools//tools/build_defs/repo:http.bzl\", \"http_archive\") load(\"@bazel_tools//tools/build_defs/repo:java.bzl\", \"java_import_external\") # NOTE: URLs", "java_import_external( name = \"org_seleniumhq_selenium_api\", jar_sha256 = \"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\", jar_urls = [", "checked. args: A dictionary that contains \"omit_...\": bool pairs. Returns:", "], ) def com_squareup_okio(): java_import_external( name = \"com_squareup_okio\", jar_sha256 =", "= [ \"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", \"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", ], ) def com_github_blang_semver(): go_repository( name", "\"context-1.1.1\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\", \"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\", ], ) def com_github_gorilla_mux():", "http_archive( name = \"com_github_urllib3\", build_file = str(Label(\"//build_files:com_github_urllib3.BUILD\")), sha256 = \"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\",", "Apache 2.0 deps = [ \"@com_google_code_findbugs_jsr305\", \"@org_jetbrains_kotlin_stdlib\", ], ) def", "them. Args: **kwargs: omit_... 
parameters used to prevent importing specific", "A dictionary that contains \"omit_...\": bool pairs. Returns: boolean indicating", "org_seleniumhq_selenium_api(): java_import_external( name = \"org_seleniumhq_selenium_api\", jar_sha256 = \"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\", jar_urls =", "failover. def web_test_repositories(**kwargs): \"\"\"Defines external repositories required by Webtesting Rules.", "and # limitations under the License. \"\"\"Defines external repositories needed", "org_apache_commons_exec() if should_create_repository(\"org_apache_httpcomponents_httpclient\", kwargs): org_apache_httpcomponents_httpclient() if should_create_repository(\"org_apache_httpcomponents_httpcore\", kwargs): org_apache_httpcomponents_httpcore() if", "\"github.com/blang/semver\", sha256 = \"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\", strip_prefix = \"semver-3.5.1\", urls = [", "\"bazel_skylib\", sha256 = \"\", strip_prefix = \"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\", urls = [", "direct dependencies, or when another Bazel project is depended upon", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", ], licenses = [\"notice\"], # Apache License,", "= [\"reciprocal\"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like),", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", ], licenses = [\"notice\"], # Apache License, Version 2.0", "downloaded, unless a target is built that depends on them.", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "Software License, Version 2.0 testonly_ = 1, ) def 
org_seleniumhq_selenium_remote_driver():", "name = \"junit\", jar_sha256 = \"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\",", "in args: val = args.pop(key) if val: return False if", "\"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", \"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", ], jar_sha256 = \"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\", licenses = [\"notice\"], #", "Returns: boolean indicating whether the repository should be created. \"\"\"", "limitations under the License. \"\"\"Defines external repositories needed by rules_webtesting.\"\"\"", "should_create_repository(\"org_json\", kwargs): org_json() if should_create_repository(\"org_seleniumhq_py\", kwargs): org_seleniumhq_py() if should_create_repository(\"org_seleniumhq_selenium_api\", kwargs):", "required by applicable law or agreed to in writing, software", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "], jar_sha256 = \"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\", licenses = [\"notice\"], # Apache 2.0", "\"com_github_tebeka_selenium\", importpath = \"github.com/tebeka/selenium\", sha256 = \"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\", strip_prefix = \"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\",", "\"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", ], licenses = [\"notice\"], # The Apache Software License,", "org_jetbrains_kotlin_stdlib(): java_import_external( name = \"org_jetbrains_kotlin_stdlib\", jar_sha256 = \"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\", jar_urls =", "platform_http_file( name = \"org_mozilla_firefox\", licenses = [\"reciprocal\"], # MPL 2.0", ") def org_chromium_chromedriver(): platform_http_file( 
name = \"org_chromium_chromedriver\", licenses = [\"reciprocal\"],", "is useful for users who want to be rigorous about", "= [\"notice\"], # MIT-style license ) def org_mozilla_firefox(): platform_http_file( name", "# Apache 2.0 exports = [ \"@com_google_code_findbugs_jsr305\", \"@com_google_errorprone_error_prone_annotations\", ], )", "= [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", ], macos_sha256 = \"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\", macos_urls =", "agreed to in writing, software # distributed under the License", "distributed under the License is distributed on an \"AS IS\"", "depending on rules_webtesting using http_archive. This function makes it easy", "as this one (e.g. com_google_guava.) Alternatively, a whitelist model may", "the parent workspace. This will check to see if a", "testonly_ = 1, deps = [ \"@com_google_code_gson\", \"@com_google_guava\", \"@net_bytebuddy\", \"@com_squareup_okhttp3_okhttp\",", "= \"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\", windows_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\", ], ) def org_hamcrest_core():", "more?) 
amd64_sha256 = \"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\", amd64_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\", ], macos_sha256", "= \"org_hamcrest_core\", jar_sha256 = \"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", \"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\",", "\" + str(kwargs.keys())) def should_create_repository(name, args): \"\"\"Returns whether the name", "if should_create_repository(\"com_github_blang_semver\", kwargs): com_github_blang_semver() if should_create_repository(\"com_github_gorilla_context\", kwargs): com_github_gorilla_context() if should_create_repository(\"com_github_gorilla_mux\",", "if should_create_repository(\"org_apache_httpcomponents_httpcore\", kwargs): org_apache_httpcomponents_httpcore() if should_create_repository(\"org_hamcrest_core\", kwargs): org_hamcrest_core() if should_create_repository(\"org_jetbrains_kotlin_stdlib\",", "an asynchronous review process. They must # be greppable for", "dependencies. \"\"\" if should_create_repository(\"bazel_skylib\", kwargs): bazel_skylib() if should_create_repository(\"com_github_blang_semver\", kwargs): com_github_blang_semver()", "[ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\", ], windows_sha256 = \"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\", windows_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\",", "name repository should be created. 
This allows creation of a", "org_jetbrains_kotlin_stdlib() if should_create_repository(\"org_json\", kwargs): org_json() if should_create_repository(\"org_seleniumhq_py\", kwargs): org_seleniumhq_py() if", "], licenses = [\"notice\"], # New BSD License testonly_ =", "URLs are mirrored by an asynchronous review process. They must", "= [ \"@org_apache_httpcomponents_httpcore\", \"@commons_logging\", \"@commons_codec\", ], ) def org_apache_httpcomponents_httpcore(): java_import_external(", "jar_sha256 = \"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", \"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", ], licenses", "name = \"net_bytebuddy\", jar_sha256 = \"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\",", "\"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", \"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", ], licenses = [\"notice\"], # The Apache Software", "com_google_code_findbugs_jsr305() if should_create_repository(\"com_google_code_gson\", kwargs): com_google_code_gson() if should_create_repository( \"com_google_errorprone_error_prone_annotations\", kwargs, ):", "\"@com_google_code_findbugs_jsr305\", \"@com_google_errorprone_error_prone_annotations\", ], ) def com_saucelabs_sauce_connect(): platform_http_file( name = \"com_saucelabs_sauce_connect\",", "[ \"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", \"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", ], ) def com_github_urllib3(): http_archive( name =", "repositories for //browser/sauce:chrome-win10. 
\"\"\" if chromium: org_chromium_chromedriver() org_chromium_chromium() if firefox:", "BSD 3-clause ) def com_google_code_gson(): java_import_external( name = \"com_google_code_gson\", jar_sha256", "\"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\", windows_urls = [ \"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\", ], ) def org_chromium_chromium(): platform_http_file(", "windows_urls = [ \"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\", ], ) def org_chromium_chromium(): platform_http_file( name", "Apache 2.0 ) def com_google_guava(): java_import_external( name = \"com_google_guava\", jar_sha256", "= \"commons_logging\", jar_sha256 = \"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", \"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\",", "# Apache 2.0 deps = [ \"@com_google_code_findbugs_jsr305\", \"@org_jetbrains_kotlin_stdlib\", ], )", "urls = [ \"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\", \"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\", ], ) def com_github_tebeka_selenium(): go_repository(", "whitelist model may be used by calling the individual functions", "kwargs): bazel_skylib() if should_create_repository(\"com_github_blang_semver\", kwargs): com_github_blang_semver() if should_create_repository(\"com_github_gorilla_context\", kwargs): com_github_gorilla_context()", "kwargs): junit() if should_create_repository(\"net_bytebuddy\", kwargs): net_bytebuddy() if should_create_repository(\"org_apache_commons_exec\", kwargs): org_apache_commons_exec()", "actually downloaded, unless a target is built that depends on", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "EULA amd64_sha256 = \"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\", amd64_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\", ], macos_sha256", "the License is distributed on an \"AS IS\" BASIS, #", "= \"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\", windows_urls = [ \"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\", ], ) def org_chromium_chromium():", "\"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\", ], ) def org_seleniumhq_selenium_api(): java_import_external( name = \"org_seleniumhq_selenium_api\", jar_sha256", "one (e.g. com_google_guava.) Alternatively, a whitelist model may be used", "java_import_external( name = \"junit\", jar_sha256 = \"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\", jar_urls = [", "parent workspace. This will check to see if a repository", "this one (e.g. com_google_guava.) 
Alternatively, a whitelist model may be", ") def org_mozilla_firefox(): platform_http_file( name = \"org_mozilla_firefox\", licenses = [\"reciprocal\"],", "law or agreed to in writing, software # distributed under", "\"org_mozilla_firefox\", licenses = [\"reciprocal\"], # MPL 2.0 amd64_sha256 = \"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\",", "= [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", ], macos_sha256 = \"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\", macos_urls =", "macos_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", ], ) def org_mozilla_geckodriver(): platform_http_file(", "[\"notice\"], # New BSD License testonly_ = 1, ) def", "= \"org_seleniumhq_selenium_api\", jar_sha256 = \"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\",", "to be rigorous about declaring their own direct dependencies, or", "they're correctly formatted. Bazel's downloader # has fast failover. 
def", "# The Apache Software License, Version 2.0 testonly_ = 1,", "\"@com_google_code_findbugs_jsr305\", ], ) def com_squareup_okio(): java_import_external( name = \"com_squareup_okio\", jar_sha256", "amd64_sha256 = \"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\", amd64_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\", ], macos_sha256 =", "\"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\", licenses = [\"notice\"], # BSD 3-clause ) def com_google_code_gson():", "chromium: org_chromium_chromedriver() org_chromium_chromium() if firefox: org_mozilla_firefox() org_mozilla_geckodriver() if sauce: com_saucelabs_sauce_connect()", "= \"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", \"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", ], licenses =", "may obtain a copy of the License at # #", "= \"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", \"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", ], licenses =", "], licenses = [\"notice\"], # Apache License, Version 2.0 )", "name = \"com_github_gorilla_mux\", importpath = \"github.com/gorilla/mux\", sha256 = \"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\", strip_prefix", "\"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", ], licenses = [\"notice\"],", "firefox: Configure repositories for //browsers:firefox-native. 
chromium: Configure repositories for //browsers:chromium-native.", "Version 2.0 ) def com_google_errorprone_error_prone_annotations(): java_import_external( name = \"com_google_errorprone_error_prone_annotations\", jar_sha256", "may not use this file except in compliance with the", "License 1.0 testonly_ = 1, deps = [\"@org_hamcrest_core\"], ) def", "\"@net_bytebuddy\", \"@com_squareup_okhttp3_okhttp\", \"@com_squareup_okio\", \"@commons_codec\", \"@commons_logging\", \"@org_apache_commons_exec\", \"@org_apache_httpcomponents_httpclient\", \"@org_apache_httpcomponents_httpcore\", \"@org_seleniumhq_selenium_api\", ],", "Copyright 2016 Google Inc. # # Licensed under the Apache", "formatted. Bazel's downloader # has fast failover. def web_test_repositories(**kwargs): \"\"\"Defines", "this file except in compliance with the License. # You", "com_google_errorprone_error_prone_annotations(): java_import_external( name = \"com_google_errorprone_error_prone_annotations\", jar_sha256 = \"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\", jar_urls =", "\"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", \"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", ], licenses = [\"notice\"], # New BSD License", "return True def browser_repositories(firefox = False, chromium = False, sauce", "name = \"com_google_errorprone_error_prone_annotations\", jar_sha256 = \"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\",", "# # Licensed under the Apache License, Version 2.0 (the", "must # be greppable for that to happen. 
It's OK", "\"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\", ], macos_sha256 = \"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\", macos_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\", ],", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "], ) def commons_codec(): java_import_external( name = \"commons_codec\", jar_sha256 =", "\"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\", macos_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", ], ) def org_seleniumhq_py():", "\"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", \"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", ], licenses = [\"notice\"], # Apache License, Version", ") def com_github_tebeka_selenium(): go_repository( name = \"com_github_tebeka_selenium\", importpath = \"github.com/tebeka/selenium\",", "load(\"@bazel_tools//tools/build_defs/repo:http.bzl\", \"http_archive\") load(\"@bazel_tools//tools/build_defs/repo:java.bzl\", \"java_import_external\") # NOTE: URLs are mirrored by", "= \"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\", macos_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\", ], windows_sha256 = \"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\",", "= \"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", \"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", ], licenses =", "chromium: Configure repositories for //browsers:chromium-native. 
sauce: Configure repositories for //browser/sauce:chrome-win10.", ") def org_seleniumhq_py(): http_archive( name = \"org_seleniumhq_py\", build_file = str(Label(\"//build_files:org_seleniumhq_py.BUILD\")),", "= \"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\", amd64_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\", ], macos_sha256 = \"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\",", "(BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT amd64_sha256", "amd64_sha256 = \"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\", amd64_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\", ], macos_sha256 =", "windows_urls = [ \"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\", ], ) def com_squareup_okhttp3_okhttp(): java_import_external( name", "def org_chromium_chromedriver(): platform_http_file( name = \"org_chromium_chromedriver\", licenses = [\"reciprocal\"], #", "defined in //browsers/.... This should only be used on an", "references. Please note that while these dependencies are defined, they", "licenses = [\"notice\"], # Apache License, Version 2.0 deps =", "transitive dependencies into the parent workspace. This will check to", "org_mozilla_geckodriver() if sauce: com_saucelabs_sauce_connect() def bazel_skylib(): http_archive( name = \"bazel_skylib\",", "= args.pop(key) if val: return False if native.existing_rule(name): return False", "it easy to import these transitive dependencies into the parent", "\"\"\"Returns whether the name repository should be created. 
This allows", ") def com_squareup_okio(): java_import_external( name = \"com_squareup_okio\", jar_sha256 = \"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\",", "= [\"reciprocal\"], # Eclipse Public License 1.0 testonly_ = 1,", "The Apache Software License, Version 2.0 testonly_ = 1, deps", "\"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\", \"https://github.com/blang/semver/archive/v3.5.1.tar.gz\", ], ) def com_github_gorilla_context(): go_repository( name = \"com_github_gorilla_context\",", "= \"com_google_guava\", jar_sha256 = \"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\", \"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\",", "\"github.com/tebeka/selenium\", sha256 = \"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\", strip_prefix = \"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\", urls = [", "com_saucelabs_sauce_connect(): platform_http_file( name = \"com_saucelabs_sauce_connect\", licenses = [\"by_exception_only\"], # SauceLabs", "# BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free", "or implied. 
# See the License for the specific language", "def org_chromium_chromium(): platform_http_file( name = \"org_chromium_chromium\", licenses = [\"notice\"], #", ") def com_google_code_gson(): java_import_external( name = \"com_google_code_gson\", jar_sha256 = \"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\",", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", \"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", ], licenses = [\"notice\"], # Apache", "2.0 ) def junit(): java_import_external( name = \"junit\", jar_sha256 =", "kwargs): org_hamcrest_core() if should_create_repository(\"org_jetbrains_kotlin_stdlib\", kwargs): org_jetbrains_kotlin_stdlib() if should_create_repository(\"org_json\", kwargs): org_json()", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", \"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", ], jar_sha256 = \"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\", licenses =", "False, sauce = False): \"\"\"Sets up repositories for browsers defined", "args: val = args.pop(key) if val: return False if native.existing_rule(name):", "strip_prefix = \"mux-1.6.2\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\", \"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\", ], )", "\"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", ], licenses = [\"notice\"], # Apache License, Version", "= \"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", 
\"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", ], licenses =", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", \"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", ], licenses = [\"notice\"], #", "chromium = False, sauce = False): \"\"\"Sets up repositories for", "= \"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\", urls = [ \"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", \"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", ], ) def", "\"com_saucelabs_sauce_connect\", licenses = [\"by_exception_only\"], # SauceLabs EULA amd64_sha256 = \"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\",", "= [ \"@com_google_code_findbugs_jsr305\", \"@com_google_errorprone_error_prone_annotations\", ], ) def com_saucelabs_sauce_connect(): platform_http_file( name", "= [ \"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\", \"https://github.com/blang/semver/archive/v3.5.1.tar.gz\", ], ) def com_github_gorilla_context(): go_repository( name", "model may be used by calling the individual functions this", "be greppable for that to happen. 
It's OK to submit", "\"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\", ], macos_sha256 = \"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\", macos_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\", ],", "2.0 ) def org_apache_httpcomponents_httpclient(): java_import_external( name = \"org_apache_httpcomponents_httpclient\", jar_sha256 =", "= [\"notice\"], # Apache 2.0 deps = [ \"@com_google_code_findbugs_jsr305\", \"@org_jetbrains_kotlin_stdlib\",", "key = \"omit_\" + name if key in args: val", "projects should define their own browsers. Args: firefox: Configure repositories", "= 1, ) def org_jetbrains_kotlin_stdlib(): java_import_external( name = \"org_jetbrains_kotlin_stdlib\", jar_sha256", "= [ \"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\", ], windows_sha256 = \"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\", windows_urls = [", "check to see if a repository has been previously defined", "defined, they are not actually downloaded, unless a target is", "[\"notice\"], # Apache License, Version 2.0 ) def commons_logging(): java_import_external(", "strip_prefix = \"urllib3-1.23\", urls = [ \"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\", ], ) def", "kwargs): commons_logging() if should_create_repository(\"junit\", kwargs): junit() if should_create_repository(\"net_bytebuddy\", kwargs): net_bytebuddy()", "\"org_json\", jar_sha256 = \"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", \"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", ],", "by calling the individual functions this method references. 
Please note", "= \"org_mozilla_geckodriver\", licenses = [\"reciprocal\"], # MPL 2.0 amd64_sha256 =", "\"org_apache_httpcomponents_httpcore\", jar_sha256 = \"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", ],", "either an \"omit_\" _+ name parameter or by previously defining", "macos_sha256 = \"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\", macos_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\", ], windows_sha256 =", "licenses = [\"reciprocal\"], # BSD 3-clause, ICU, MPL 1.1, libpng", "strip_prefix = \"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\", urls = [ \"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", \"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", ], )", "importpath = \"github.com/gorilla/context\", sha256 = \"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\", strip_prefix = \"context-1.1.1\", urls", "\"org_mozilla_geckodriver\", licenses = [\"reciprocal\"], # MPL 2.0 amd64_sha256 = \"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\",", "[\"@com_google_code_findbugs_jsr305\"], ) def org_apache_commons_exec(): java_import_external( name = \"org_apache_commons_exec\", jar_sha256 =", "= [\"@org_hamcrest_core\"], ) def net_bytebuddy(): java_import_external( name = \"net_bytebuddy\", jar_sha256", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", \"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", ], licenses = [\"notice\"], 
#", "return False if native.existing_rule(name): return False return True def browser_repositories(firefox", "Args: firefox: Configure repositories for //browsers:firefox-native. chromium: Configure repositories for", "\"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\", windows_urls = [ \"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\", ], ) def com_squareup_okhttp3_okhttp(): java_import_external(", "= \"github.com/gorilla/mux\", sha256 = \"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\", strip_prefix = \"mux-1.6.2\", urls =", "or by previously defining a rule for the repository. The", "been previously defined before defining a new repository. Alternatively, individual", "kwargs): com_github_gorilla_context() if should_create_repository(\"com_github_gorilla_mux\", kwargs): com_github_gorilla_mux() if should_create_repository(\"com_github_tebeka_selenium\", kwargs): com_github_tebeka_selenium()", "\"@com_google_code_gson\", \"@com_google_guava\", \"@net_bytebuddy\", \"@com_squareup_okhttp3_okhttp\", \"@com_squareup_okio\", \"@commons_codec\", \"@commons_logging\", \"@org_apache_commons_exec\", \"@org_apache_httpcomponents_httpclient\", \"@org_apache_httpcomponents_httpcore\",", "licenses = [\"reciprocal\"], # MPL 2.0 amd64_sha256 = \"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\", amd64_urls", "\"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\", strip_prefix = \"mux-1.6.2\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\", \"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\", ],", ") def com_github_blang_semver(): go_repository( name = \"com_github_blang_semver\", importpath = \"github.com/blang/semver\",", "= \"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\", strip_prefix = \"semver-3.5.1\", urls = [ \"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\", 
\"https://github.com/blang/semver/archive/v3.5.1.tar.gz\",", "deps = [\"@org_hamcrest_core\"], ) def net_bytebuddy(): java_import_external( name = \"net_bytebuddy\",", "are not actually downloaded, unless a target is built that", "when another Bazel project is depended upon (e.g. rules_closure) that", "go_repository( name = \"com_github_blang_semver\", importpath = \"github.com/blang/semver\", sha256 = \"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\",", "should be created. \"\"\" key = \"omit_\" + name if", "[\"notice\"], # Apache 2.0 deps = [ \"@com_google_code_findbugs_jsr305\", \"@org_jetbrains_kotlin_stdlib\", ],", "name = \"com_github_tebeka_selenium\", importpath = \"github.com/tebeka/selenium\", sha256 = \"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\", strip_prefix", "dependencies are defined, they are not actually downloaded, unless a", "): com_google_errorprone_error_prone_annotations() if should_create_repository(\"com_google_guava\", kwargs): com_google_guava() if should_create_repository(\"com_squareup_okhttp3_okhttp\", kwargs): com_squareup_okhttp3_okhttp()", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", \"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", ], licenses = [\"notice\"], #", "excluded with an \"omit_\" + name parameter. This is useful", "2016 Google Inc. 
# # Licensed under the Apache License,", "kwargs): com_squareup_okhttp3_okhttp() if should_create_repository(\"com_squareup_okio\", kwargs): com_squareup_okio() if should_create_repository(\"commons_codec\", kwargs): commons_codec()", "= \"omit_\" + name if key in args: val =", "\"@org_jetbrains_kotlin_stdlib\", ], ) def commons_codec(): java_import_external( name = \"commons_codec\", jar_sha256", "jar_sha256 = \"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", ], licenses", "# BSD 3-clause ) def com_google_code_gson(): java_import_external( name = \"com_google_code_gson\",", "def commons_codec(): java_import_external( name = \"commons_codec\", jar_sha256 = \"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\", jar_urls", "= \"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", ], licenses =", "same dependencies as this one (e.g. com_google_guava.) Alternatively, a whitelist", "\"omit_...\": bool pairs. 
Returns: boolean indicating whether the repository should", "\"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\", strip_prefix = \"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\", urls = [ \"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", \"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", ],", "in writing, software # distributed under the License is distributed", "= \"org_seleniumhq_selenium_remote_driver\", jar_sha256 = \"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\",", "when depending on rules_webtesting using http_archive. This function makes it", "1, deps = [\"@org_hamcrest_core\"], ) def net_bytebuddy(): java_import_external( name =", "name = \"com_google_code_gson\", jar_sha256 = \"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\",", "= False): \"\"\"Sets up repositories for browsers defined in //browsers/....", "should_create_repository(\"bazel_skylib\", kwargs): bazel_skylib() if should_create_repository(\"com_github_blang_semver\", kwargs): com_github_blang_semver() if should_create_repository(\"com_github_gorilla_context\", kwargs):", "kwargs): org_seleniumhq_selenium_remote_driver() if kwargs.keys(): print(\"The following parameters are unknown: \"", "to happen. It's OK to submit broken mirror # URLs,", "the repository. The args dict will be mutated to remove", "by previously defining a rule for the repository. The args", "omit_... parameters used to prevent importing specific dependencies. 
\"\"\" if", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", \"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", ], licenses = [\"notice\"], # Apache 2.0", "\"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\", licenses = [\"notice\"], # Apache 2.0 deps = [", "makes it easy to import these transitive dependencies into the", "[ \"@com_squareup_okio\", \"@com_google_code_findbugs_jsr305\", ], ) def com_squareup_okio(): java_import_external( name =", "commons_codec(): java_import_external( name = \"commons_codec\", jar_sha256 = \"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\", jar_urls =", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "repositories needed by rules_webtesting.\"\"\" load(\"//web/internal:platform_http_file.bzl\", \"platform_http_file\") load(\"@bazel_gazelle//:deps.bzl\", \"go_repository\") load(\"@bazel_tools//tools/build_defs/repo:http.bzl\", \"http_archive\")", "License, Version 2.0 (the \"License\"); # you may not use", "Apache License, Version 2.0 ) def org_chromium_chromedriver(): platform_http_file( name =", "jar_sha256 = \"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", ], licenses", "[\"notice\"], # BSD 3-clause (maybe more?) 
amd64_sha256 = \"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\", amd64_urls", "2-clause, MIT amd64_sha256 = \"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\", amd64_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\", ],", "\"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\", ], ) def org_chromium_chromium(): platform_http_file( name = \"org_chromium_chromium\", licenses", "if should_create_repository(\"org_jetbrains_kotlin_stdlib\", kwargs): org_jetbrains_kotlin_stdlib() if should_create_repository(\"org_json\", kwargs): org_json() if should_create_repository(\"org_seleniumhq_py\",", "libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT", "com_squareup_okhttp3_okhttp() if should_create_repository(\"com_squareup_okio\", kwargs): com_squareup_okio() if should_create_repository(\"commons_codec\", kwargs): commons_codec() if", "# limitations under the License. \"\"\"Defines external repositories needed by", "while these dependencies are defined, they are not actually downloaded,", "= 1, deps = [\"@org_hamcrest_core\"], ) def net_bytebuddy(): java_import_external( name", "Bazel's downloader # has fast failover. def web_test_repositories(**kwargs): \"\"\"Defines external", "They must # be greppable for that to happen. 
It's", "], macos_sha256 = \"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\", macos_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", ],", "the License for the specific language governing permissions and #", "if should_create_repository(\"org_seleniumhq_selenium_remote_driver\", kwargs): org_seleniumhq_selenium_remote_driver() if kwargs.keys(): print(\"The following parameters are", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", \"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", ], licenses = [\"notice\"], # Apache License,", "licenses = [\"notice\"], # BSD 3-clause ) def com_google_code_gson(): java_import_external(", "should_create_repository(\"com_github_gorilla_mux\", kwargs): com_github_gorilla_mux() if should_create_repository(\"com_github_tebeka_selenium\", kwargs): com_github_tebeka_selenium() if should_create_repository(\"com_github_urllib3\", kwargs):", "unless a target is built that depends on them. Args:", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "indicating whether the repository should be created. 
\"\"\" key =", "\"semver-3.5.1\", urls = [ \"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\", \"https://github.com/blang/semver/archive/v3.5.1.tar.gz\", ], ) def com_github_gorilla_context():", ") def org_seleniumhq_selenium_remote_driver(): java_import_external( name = \"org_seleniumhq_selenium_remote_driver\", jar_sha256 = \"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\",", "= \"com_google_code_gson\", jar_sha256 = \"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", \"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\",", "\"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", ], macos_sha256 = \"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\", macos_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\",", "browser_repositories(firefox = False, chromium = False, sauce = False): \"\"\"Sets", "The Apache Software License, Version 2.0 testonly_ = 1, )", "\"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", \"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", ], licenses = [\"notice\"],", "Configure repositories for //browsers:firefox-native. chromium: Configure repositories for //browsers:chromium-native. 
sauce:", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "2.0 exports = [ \"@com_google_code_findbugs_jsr305\", \"@com_google_errorprone_error_prone_annotations\", ], ) def com_saucelabs_sauce_connect():", "declaring their own direct dependencies, or when another Bazel project", "\"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", \"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", ], licenses = [\"notice\"],", "\"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", ], macos_sha256 = \"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\", macos_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\",", "jar_sha256 = \"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", \"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", ], licenses", "def org_jetbrains_kotlin_stdlib(): java_import_external( name = \"org_jetbrains_kotlin_stdlib\", jar_sha256 = \"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\", jar_urls", "\"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", ], licenses = [\"notice\"], # Apache 2.0 ) def", "License, Version 2.0 ) def commons_logging(): java_import_external( name = \"commons_logging\",", "the same dependencies as this one (e.g. com_google_guava.) 
Alternatively, a", "\"\", strip_prefix = \"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\", urls = [ \"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", \"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", ],", "jar_sha256 = \"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\", licenses = [\"notice\"], # Apache 2.0 deps", "OK to submit broken mirror # URLs, so long as", "the repository that should be checked. args: A dictionary that", ") def org_apache_httpcomponents_httpclient(): java_import_external( name = \"org_apache_httpcomponents_httpclient\", jar_sha256 = \"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\",", "on them. Args: **kwargs: omit_... parameters used to prevent importing", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", ") def org_apache_httpcomponents_httpcore(): java_import_external( name = \"org_apache_httpcomponents_httpcore\", jar_sha256 = \"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\",", "name = \"com_squareup_okhttp3_okhttp\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", \"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", ], jar_sha256", "\"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\", macos_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\", ], windows_sha256 = \"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\", windows_urls", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "Apache 2.0 deps = [\"@com_google_code_findbugs_jsr305\"], ) def org_apache_commons_exec(): java_import_external( name", "java_import_external( name = \"org_apache_httpcomponents_httpclient\", jar_sha256 = 
\"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\", jar_urls = [", "\"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", ], licenses = [\"notice\"], # The Apache Software License,", "note that while these dependencies are defined, they are not", "by rules_webtesting.\"\"\" load(\"//web/internal:platform_http_file.bzl\", \"platform_http_file\") load(\"@bazel_gazelle//:deps.bzl\", \"go_repository\") load(\"@bazel_tools//tools/build_defs/repo:http.bzl\", \"http_archive\") load(\"@bazel_tools//tools/build_defs/repo:java.bzl\", \"java_import_external\")", ") def org_seleniumhq_selenium_api(): java_import_external( name = \"org_seleniumhq_selenium_api\", jar_sha256 = \"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\",", "testonly_ = 1, ) def org_seleniumhq_selenium_remote_driver(): java_import_external( name = \"org_seleniumhq_selenium_remote_driver\",", "name = \"com_squareup_okio\", jar_sha256 = \"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\",", "up repositories for browsers defined in //browsers/.... 
This should only", "3-clause ) def com_google_code_gson(): java_import_external( name = \"com_google_code_gson\", jar_sha256 =", "2.0 ) def commons_logging(): java_import_external( name = \"commons_logging\", jar_sha256 =", "the Apache License, Version 2.0 (the \"License\"); # you may", "# NOTE: URLs are mirrored by an asynchronous review process.", "= [ \"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\", ], ) def com_squareup_okhttp3_okhttp(): java_import_external( name =", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", \"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", ], licenses = [\"notice\"], #", "def org_seleniumhq_selenium_remote_driver(): java_import_external( name = \"org_seleniumhq_selenium_remote_driver\", jar_sha256 = \"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\", jar_urls", "load(\"@bazel_gazelle//:deps.bzl\", \"go_repository\") load(\"@bazel_tools//tools/build_defs/repo:http.bzl\", \"http_archive\") load(\"@bazel_tools//tools/build_defs/repo:java.bzl\", \"java_import_external\") # NOTE: URLs are", "jar_sha256 = \"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", ], licenses", "new repository. 
Alternatively, individual dependencies may be excluded with an", "rules_webtesting.\"\"\" load(\"//web/internal:platform_http_file.bzl\", \"platform_http_file\") load(\"@bazel_gazelle//:deps.bzl\", \"go_repository\") load(\"@bazel_tools//tools/build_defs/repo:http.bzl\", \"http_archive\") load(\"@bazel_tools//tools/build_defs/repo:java.bzl\", \"java_import_external\") #", "= str(Label(\"//build_files:org_seleniumhq_py.BUILD\")), sha256 = \"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\", strip_prefix = \"selenium-3.14.0\", urls =", "testonly_ = 1, deps = [\"@org_hamcrest_core\"], ) def net_bytebuddy(): java_import_external(", "amd64_sha256 = \"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\", amd64_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", ], macos_sha256", "Software License, Version 2.0 ) def com_google_errorprone_error_prone_annotations(): java_import_external( name =", "1, ) def org_jetbrains_kotlin_stdlib(): java_import_external( name = \"org_jetbrains_kotlin_stdlib\", jar_sha256 =", "= [ \"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\", ], ) def com_google_code_findbugs_jsr305(): java_import_external( name =", "platform_http_file( name = \"org_chromium_chromedriver\", licenses = [\"reciprocal\"], # BSD 3-clause,", "repository should be created. This allows creation of a repository", "projects to call from their WORKSPACE file when depending on", "may be excluded with an \"omit_\" + name parameter. 
This", "exists for other Bazel projects to call from their WORKSPACE", "= \"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", ], licenses =", "2.0 amd64_sha256 = \"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\", amd64_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", ],", "rule for the repository. The args dict will be mutated", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", \"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", ], jar_sha256 = \"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\", licenses = [\"notice\"],", "], ) def com_github_blang_semver(): go_repository( name = \"com_github_blang_semver\", importpath =", "java_import_external( name = \"com_google_guava\", jar_sha256 = \"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\", jar_urls = [", "name of the repository that should be checked. args: A", "str(kwargs.keys())) def should_create_repository(name, args): \"\"\"Returns whether the name repository should", "jar_sha256 = \"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", \"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", ], licenses", "to import these transitive dependencies into the parent workspace. 
This", "= \"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\", amd64_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", ], macos_sha256 =", "# URLs, so long as they're correctly formatted. Bazel's downloader", "\"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", \"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", ], licenses = [\"notice\"],", "boolean indicating whether the repository should be created. \"\"\" key", "macos_sha256 = \"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\", macos_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\", ], windows_sha256 =", "com_squareup_okio() if should_create_repository(\"commons_codec\", kwargs): commons_codec() if should_create_repository(\"commons_logging\", kwargs): commons_logging() if", "= \"urllib3-1.23\", urls = [ \"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\", ], ) def com_google_code_findbugs_jsr305():", "\"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", ], licenses = [\"notice\"], # The Apache Software", "\"com_github_gorilla_mux\", importpath = \"github.com/gorilla/mux\", sha256 = \"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\", strip_prefix = \"mux-1.6.2\",", "License, Version 2.0 testonly_ = 1, deps = [ \"@com_google_code_gson\",", "specific dependencies. 
\"\"\" if should_create_repository(\"bazel_skylib\", kwargs): bazel_skylib() if should_create_repository(\"com_github_blang_semver\", kwargs):", "\"@com_google_guava\", \"@net_bytebuddy\", \"@com_squareup_okhttp3_okhttp\", \"@com_squareup_okio\", \"@commons_codec\", \"@commons_logging\", \"@org_apache_commons_exec\", \"@org_apache_httpcomponents_httpclient\", \"@org_apache_httpcomponents_httpcore\", \"@org_seleniumhq_selenium_api\",", "importpath = \"github.com/tebeka/selenium\", sha256 = \"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\", strip_prefix = \"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\", urls", "[\"reciprocal\"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic", "under the License is distributed on an \"AS IS\" BASIS,", "], macos_sha256 = \"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\", macos_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\", ], windows_sha256", "junit(): java_import_external( name = \"junit\", jar_sha256 = \"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\", jar_urls =", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", \"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", ], licenses = [\"notice\"], #", "\"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\", ], ) def com_github_gorilla_mux(): go_repository( name = \"com_github_gorilla_mux\", importpath", "the individual functions this method references. 
Please note that while", "= \"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\", strip_prefix = \"selenium-3.14.0\", urls = [ \"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\", ],", "# Apache License, Version 2.0 deps = [ \"@org_apache_httpcomponents_httpcore\", \"@commons_logging\",", "Apache License, Version 2.0 deps = [ \"@org_apache_httpcomponents_httpcore\", \"@commons_logging\", \"@commons_codec\",", "the name repository should be created. This allows creation of", "org_chromium_chromium() if firefox: org_mozilla_firefox() org_mozilla_geckodriver() if sauce: com_saucelabs_sauce_connect() def bazel_skylib():", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", ], licenses = [\"notice\"], #", "an experimental basis; projects should define their own browsers. Args:", ") def net_bytebuddy(): java_import_external( name = \"net_bytebuddy\", jar_sha256 = \"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\",", "this method references. 
Please note that while these dependencies are", "The Apache Software License, Version 2.0 ) def org_json(): java_import_external(", "\"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", \"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", ], licenses = [\"notice\"], # Apache 2.0 )", "= [ \"@com_google_code_findbugs_jsr305\", \"@org_jetbrains_kotlin_stdlib\", ], ) def commons_codec(): java_import_external( name", "2.0 deps = [ \"@org_apache_httpcomponents_httpcore\", \"@commons_logging\", \"@commons_codec\", ], ) def", "This function makes it easy to import these transitive dependencies", "[\"notice\"], # Apache 2.0 ) def com_google_guava(): java_import_external( name =", "ANY KIND, either express or implied. # See the License", "the License. # You may obtain a copy of the", "[ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\", ], windows_sha256 = \"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\", windows_urls = [ \"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\",", "commons_codec() if should_create_repository(\"commons_logging\", kwargs): commons_logging() if should_create_repository(\"junit\", kwargs): junit() if", "[\"notice\"], # BSD 3-clause ) def com_google_code_gson(): java_import_external( name =", "# See the License for the specific language governing permissions", "Public License 1.0 testonly_ = 1, deps = [\"@org_hamcrest_core\"], )", "licenses = [\"notice\"], # MIT-style license ) def org_mozilla_firefox(): platform_http_file(", "= \"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", \"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", ], licenses =", "1, 
deps = [ \"@com_google_code_gson\", \"@com_google_guava\", \"@net_bytebuddy\", \"@com_squareup_okhttp3_okhttp\", \"@com_squareup_okio\", \"@commons_codec\",", "downloader # has fast failover. def web_test_repositories(**kwargs): \"\"\"Defines external repositories", "that defines the same dependencies as this one (e.g. com_google_guava.)", "if should_create_repository(\"net_bytebuddy\", kwargs): net_bytebuddy() if should_create_repository(\"org_apache_commons_exec\", kwargs): org_apache_commons_exec() if should_create_repository(\"org_apache_httpcomponents_httpclient\",", "# Eclipse Public License 1.0 testonly_ = 1, deps =", "jar_sha256 = \"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\", licenses = [\"notice\"], # BSD 3-clause )", "commons_logging() if should_create_repository(\"junit\", kwargs): junit() if should_create_repository(\"net_bytebuddy\", kwargs): net_bytebuddy() if", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", \"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", ], jar_sha256 = \"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\", licenses =", ") def junit(): java_import_external( name = \"junit\", jar_sha256 = \"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a\",", "a repository to be disabled by either an \"omit_\" _+", "name if key in args: val = args.pop(key) if val:", "[\"reciprocal\"], # Eclipse Public License 1.0 testonly_ = 1, deps", "\"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", ], ) def org_mozilla_geckodriver(): platform_http_file( name = \"org_mozilla_geckodriver\", licenses", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", ], licenses = [\"notice\"], # Apache", "licenses = 
[\"notice\"], # Apache License, Version 2.0 ) def", "name = \"org_chromium_chromium\", licenses = [\"notice\"], # BSD 3-clause (maybe", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "\"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", ], licenses = [\"notice\"], # New BSD License testonly_", "[ \"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\", ], ) def org_seleniumhq_selenium_api(): java_import_external( name = \"org_seleniumhq_selenium_api\",", "writing, software # distributed under the License is distributed on", "\"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", ], macos_sha256 = \"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\", macos_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\",", "], windows_sha256 = \"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\", windows_urls = [ \"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\", ], )", "\"@org_apache_httpcomponents_httpcore\", \"@commons_logging\", \"@commons_codec\", ], ) def org_apache_httpcomponents_httpcore(): java_import_external( name =", "mutated to remove \"omit_\" + name. Args: name: The name", "are mirrored by an asynchronous review process. 
They must #", "# MPL 2.0 amd64_sha256 = \"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\", amd64_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\",", "broken mirror # URLs, so long as they're correctly formatted.", "def com_github_gorilla_mux(): go_repository( name = \"com_github_gorilla_mux\", importpath = \"github.com/gorilla/mux\", sha256", "\"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", \"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", ], licenses = [\"notice\"],", "are defined, they are not actually downloaded, unless a target", "jar_sha256 = \"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", \"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", ], licenses", "if should_create_repository(\"commons_codec\", kwargs): commons_codec() if should_create_repository(\"commons_logging\", kwargs): commons_logging() if should_create_repository(\"junit\",", "com_google_guava.) 
Alternatively, a whitelist model may be used by calling", "\"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", ], jar_sha256 = \"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\", licenses = [\"notice\"], # Apache", "= \"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\", macos_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", ], ) def", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", ], licenses = [\"notice\"], # The Apache", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\", \"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\", ], licenses = [\"notice\"], # Apache", "def bazel_skylib(): http_archive( name = \"bazel_skylib\", sha256 = \"\", strip_prefix", "= \"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\", amd64_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", ], macos_sha256 =", "MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD", "the repository should be created. \"\"\" key = \"omit_\" +", "call from their WORKSPACE file when depending on rules_webtesting using", "(maybe more?) 
amd64_sha256 = \"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\", amd64_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\", ],", "build_file = str(Label(\"//build_files:com_github_urllib3.BUILD\")), sha256 = \"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\", strip_prefix = \"urllib3-1.23\", urls", "name = \"commons_logging\", jar_sha256 = \"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\",", "licenses = [\"notice\"], # New BSD License testonly_ = 1,", "Webtesting Rules. This function exists for other Bazel projects to", "com_squareup_okhttp3_okhttp(): java_import_external( name = \"com_squareup_okhttp3_okhttp\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", \"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\",", "\"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", \"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", ], jar_sha256 = \"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\", licenses = [\"notice\"], #", "name = \"com_saucelabs_sauce_connect\", licenses = [\"by_exception_only\"], # SauceLabs EULA amd64_sha256", "\"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\", ], macos_sha256 = \"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\", macos_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\", ],", "MPL 2.0 amd64_sha256 = \"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\", amd64_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", 
\"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\",", "that depends on them. Args: **kwargs: omit_... parameters used to", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", \"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", ], jar_sha256 = \"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\", licenses = [\"notice\"],", "deps = [ \"@com_google_code_findbugs_jsr305\", \"@org_jetbrains_kotlin_stdlib\", ], ) def commons_codec(): java_import_external(", "], ) def org_chromium_chromium(): platform_http_file( name = \"org_chromium_chromium\", licenses =", "# MIT-style license ) def org_mozilla_firefox(): platform_http_file( name = \"org_mozilla_firefox\",", "\"com_github_gorilla_context\", importpath = \"github.com/gorilla/context\", sha256 = \"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\", strip_prefix = \"context-1.1.1\",", "def com_saucelabs_sauce_connect(): platform_http_file( name = \"com_saucelabs_sauce_connect\", licenses = [\"by_exception_only\"], #", "def commons_logging(): java_import_external( name = \"commons_logging\", jar_sha256 = \"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\", jar_urls", "com_github_gorilla_mux(): go_repository( name = \"com_github_gorilla_mux\", importpath = \"github.com/gorilla/mux\", sha256 =", ") def org_mozilla_geckodriver(): platform_http_file( name = \"org_mozilla_geckodriver\", licenses = [\"reciprocal\"],", "windows_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\", ], ) def org_hamcrest_core(): java_import_external( name", "], ) def com_github_gorilla_mux(): go_repository( name = \"com_github_gorilla_mux\", importpath =", "# has fast failover. 
def web_test_repositories(**kwargs): \"\"\"Defines external repositories required", "\"org_seleniumhq_selenium_remote_driver\", jar_sha256 = \"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", ],", "], ) def com_google_code_findbugs_jsr305(): java_import_external( name = \"com_google_code_findbugs_jsr305\", jar_urls =", "2.0 ) def com_google_errorprone_error_prone_annotations(): java_import_external( name = \"com_google_errorprone_error_prone_annotations\", jar_sha256 =", "deps = [\"@com_google_code_findbugs_jsr305\"], ) def org_apache_commons_exec(): java_import_external( name = \"org_apache_commons_exec\",", "by either an \"omit_\" _+ name parameter or by previously", "org_seleniumhq_py(): http_archive( name = \"org_seleniumhq_py\", build_file = str(Label(\"//build_files:org_seleniumhq_py.BUILD\")), sha256 =", ") def com_google_code_findbugs_jsr305(): java_import_external( name = \"com_google_code_findbugs_jsr305\", jar_urls = [", "Inc. # # Licensed under the Apache License, Version 2.0", "[\"notice\"], # Apache License, Version 2.0 deps = [ \"@org_apache_httpcomponents_httpcore\",", "created. This allows creation of a repository to be disabled", "licenses = [\"notice\"], # Apache 2.0 ) def com_google_guava(): java_import_external(", "sauce: com_saucelabs_sauce_connect() def bazel_skylib(): http_archive( name = \"bazel_skylib\", sha256 =", "URLs, so long as they're correctly formatted. Bazel's downloader #", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "pairs. Returns: boolean indicating whether the repository should be created.", "3-clause (maybe more?) 
amd64_sha256 = \"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\", amd64_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\",", "process. They must # be greppable for that to happen.", "\"@com_squareup_okhttp3_okhttp\", \"@com_squareup_okio\", \"@commons_codec\", \"@commons_logging\", \"@org_apache_commons_exec\", \"@org_apache_httpcomponents_httpclient\", \"@org_apache_httpcomponents_httpcore\", \"@org_seleniumhq_selenium_api\", ], )", "name = \"org_mozilla_firefox\", licenses = [\"reciprocal\"], # MPL 2.0 amd64_sha256", "\"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\", amd64_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\", ], macos_sha256 = \"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\", macos_urls", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "target is built that depends on them. Args: **kwargs: omit_...", "importpath = \"github.com/gorilla/mux\", sha256 = \"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\", strip_prefix = \"mux-1.6.2\", urls", "The Apache Software License, Version 2.0 ) def com_google_errorprone_error_prone_annotations(): java_import_external(", "False return True def browser_repositories(firefox = False, chromium = False,", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "jar_sha256 = \"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", \"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", ], licenses", "only be used on an experimental basis; projects should define", "mirror # URLs, so long as they're correctly formatted. 
Bazel's", "com_github_tebeka_selenium() if should_create_repository(\"com_github_urllib3\", kwargs): com_github_urllib3() if should_create_repository(\"com_google_code_findbugs_jsr305\", kwargs): com_google_code_findbugs_jsr305() if", "their own browsers. Args: firefox: Configure repositories for //browsers:firefox-native. chromium:", "\"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", ], licenses = [\"notice\"], # Apache License, Version 2.0", "False, chromium = False, sauce = False): \"\"\"Sets up repositories", "importpath = \"github.com/blang/semver\", sha256 = \"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\", strip_prefix = \"semver-3.5.1\", urls", "\"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", ], licenses = [\"notice\"], # The Apache Software", "\"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\", macos_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\", ], windows_sha256 = \"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\", windows_urls", "\"\"\" if chromium: org_chromium_chromedriver() org_chromium_chromium() if firefox: org_mozilla_firefox() org_mozilla_geckodriver() if", "governing permissions and # limitations under the License. 
\"\"\"Defines external", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "= \"org_chromium_chromedriver\", licenses = [\"reciprocal\"], # BSD 3-clause, ICU, MPL", "org_hamcrest_core() if should_create_repository(\"org_jetbrains_kotlin_stdlib\", kwargs): org_jetbrains_kotlin_stdlib() if should_create_repository(\"org_json\", kwargs): org_json() if", "\"com_google_code_gson\", jar_sha256 = \"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", \"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", ],", "SauceLabs EULA amd64_sha256 = \"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\", amd64_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\", ],", "= [ \"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\", \"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\", ], ) def com_github_tebeka_selenium(): go_repository( name", "= \"com_saucelabs_sauce_connect\", licenses = [\"by_exception_only\"], # SauceLabs EULA amd64_sha256 =", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", \"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", ], licenses = [\"reciprocal\"], # Eclipse Public", "to call from their WORKSPACE file when depending on rules_webtesting", "\"org_chromium_chromium\", licenses = [\"notice\"], # BSD 3-clause (maybe more?) amd64_sha256", "specific language governing permissions and # limitations under the License.", "if should_create_repository(\"com_squareup_okio\", kwargs): com_squareup_okio() if should_create_repository(\"commons_codec\", kwargs): commons_codec() if should_create_repository(\"commons_logging\",", "Args: name: The name of the repository that should be", "their WORKSPACE file when depending on rules_webtesting using http_archive. 
This", "with an \"omit_\" + name parameter. This is useful for", "testonly_ = 1, ) def org_jetbrains_kotlin_stdlib(): java_import_external( name = \"org_jetbrains_kotlin_stdlib\",", "# The Apache Software License, Version 2.0 ) def junit():", "name. Args: name: The name of the repository that should", "another Bazel project is depended upon (e.g. rules_closure) that defines", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", \"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", ], licenses = [\"reciprocal\"], # Eclipse", "for //browsers:chromium-native. sauce: Configure repositories for //browser/sauce:chrome-win10. \"\"\" if chromium:", "# you may not use this file except in compliance", "\"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", \"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", ], licenses = [\"notice\"],", "jar_sha256 = \"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", \"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", ], licenses", "\"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\", ], windows_sha256 = \"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\", windows_urls = [ \"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\", ],", "go_repository( name = \"com_github_gorilla_context\", importpath = \"github.com/gorilla/context\", sha256 = \"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\",", "def web_test_repositories(**kwargs): \"\"\"Defines external repositories required by Webtesting Rules. 
This", "= \"github.com/blang/semver\", sha256 = \"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\", strip_prefix = \"semver-3.5.1\", urls =", "# Apache 2.0 deps = [\"@com_google_code_findbugs_jsr305\"], ) def org_apache_commons_exec(): java_import_external(", "It's OK to submit broken mirror # URLs, so long", "], windows_sha256 = \"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e\", windows_urls = [ \"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip\", ], )", "com_github_gorilla_context(): go_repository( name = \"com_github_gorilla_context\", importpath = \"github.com/gorilla/context\", sha256 =", "contains \"omit_...\": bool pairs. Returns: boolean indicating whether the repository", "\"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", ], licenses = [\"notice\"], # Apache 2.0 deps =", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", \"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", ], licenses = [\"notice\"], # Apache 2.0", "licenses = [\"notice\"], # BSD 3-clause (maybe more?) amd64_sha256 =", "License, Version 2.0 deps = [ \"@org_apache_httpcomponents_httpcore\", \"@commons_logging\", \"@commons_codec\", ],", "if val: return False if native.existing_rule(name): return False return True", "\"omit_\" + name parameter. 
This is useful for users who", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "= \"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", \"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", ], licenses =", "should_create_repository(\"org_jetbrains_kotlin_stdlib\", kwargs): org_jetbrains_kotlin_stdlib() if should_create_repository(\"org_json\", kwargs): org_json() if should_create_repository(\"org_seleniumhq_py\", kwargs):", "sha256 = \"\", strip_prefix = \"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\", urls = [ \"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\",", "load(\"//web/internal:platform_http_file.bzl\", \"platform_http_file\") load(\"@bazel_gazelle//:deps.bzl\", \"go_repository\") load(\"@bazel_tools//tools/build_defs/repo:http.bzl\", \"http_archive\") load(\"@bazel_tools//tools/build_defs/repo:java.bzl\", \"java_import_external\") # NOTE:", "junit() if should_create_repository(\"net_bytebuddy\", kwargs): net_bytebuddy() if should_create_repository(\"org_apache_commons_exec\", kwargs): org_apache_commons_exec() if", "that to happen. 
It's OK to submit broken mirror #", "str(Label(\"//build_files:com_github_urllib3.BUILD\")), sha256 = \"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\", strip_prefix = \"urllib3-1.23\", urls = [", "java_import_external( name = \"com_google_code_gson\", jar_sha256 = \"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\", jar_urls = [", "under the Apache License, Version 2.0 (the \"License\"); # you", "name = \"org_apache_httpcomponents_httpcore\", jar_sha256 = \"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\",", "functions this method references. Please note that while these dependencies", "= [ \"@com_squareup_okio\", \"@com_google_code_findbugs_jsr305\", ], ) def com_squareup_okio(): java_import_external( name", "net_bytebuddy(): java_import_external( name = \"net_bytebuddy\", jar_sha256 = \"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\", jar_urls =", "Alternatively, individual dependencies may be excluded with an \"omit_\" +", "java_import_external( name = \"org_apache_commons_exec\", jar_sha256 = \"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\", jar_urls = [", "= [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\", ], macos_sha256 = \"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\", macos_urls = [", "repository has been previously defined before defining a new repository.", "by an asynchronous review process. 
They must # be greppable", "if should_create_repository(\"org_hamcrest_core\", kwargs): org_hamcrest_core() if should_create_repository(\"org_jetbrains_kotlin_stdlib\", kwargs): org_jetbrains_kotlin_stdlib() if should_create_repository(\"org_json\",", "native.existing_rule(name): return False return True def browser_repositories(firefox = False, chromium", "should_create_repository(\"com_google_code_gson\", kwargs): com_google_code_gson() if should_create_repository( \"com_google_errorprone_error_prone_annotations\", kwargs, ): com_google_errorprone_error_prone_annotations() if", "= [\"notice\"], # BSD 3-clause (maybe more?) amd64_sha256 = \"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\",", "License, Version 2.0 ) def org_chromium_chromedriver(): platform_http_file( name = \"org_chromium_chromedriver\",", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "# Copyright 2016 Google Inc. # # Licensed under the", "\"org_seleniumhq_selenium_api\", jar_sha256 = \"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", ],", "previously defined before defining a new repository. 
Alternatively, individual dependencies", "name = \"com_github_blang_semver\", importpath = \"github.com/blang/semver\", sha256 = \"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\", strip_prefix", "\"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", ], licenses = [\"notice\"], # Apache License, Version 2.0", "\"go_repository\") load(\"@bazel_tools//tools/build_defs/repo:http.bzl\", \"http_archive\") load(\"@bazel_tools//tools/build_defs/repo:java.bzl\", \"java_import_external\") # NOTE: URLs are mirrored", "if should_create_repository(\"com_google_code_gson\", kwargs): com_google_code_gson() if should_create_repository( \"com_google_errorprone_error_prone_annotations\", kwargs, ): com_google_errorprone_error_prone_annotations()", "License testonly_ = 1, ) def org_jetbrains_kotlin_stdlib(): java_import_external( name =", "\"com_squareup_okhttp3_okhttp\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", \"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar\", ], jar_sha256 = \"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e\",", ") def commons_codec(): java_import_external( name = \"commons_codec\", jar_sha256 = \"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\",", "individual dependencies may be excluded with an \"omit_\" + name", "licenses = [\"notice\"], # The Apache Software License, Version 2.0", "as they're correctly formatted. 
Bazel's downloader # has fast failover.", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", \"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", ], licenses = [\"notice\"], # Apache License,", "[\"notice\"], # Apache License, Version 2.0 ) def org_chromium_chromedriver(): platform_http_file(", "], macos_sha256 = \"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\", macos_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", ],", "Version 2.0 testonly_ = 1, deps = [ \"@com_google_code_gson\", \"@com_google_guava\",", "_+ name parameter or by previously defining a rule for", "kwargs): org_seleniumhq_py() if should_create_repository(\"org_seleniumhq_selenium_api\", kwargs): org_seleniumhq_selenium_api() if should_create_repository(\"org_seleniumhq_selenium_remote_driver\", kwargs): org_seleniumhq_selenium_remote_driver()", "created. \"\"\" key = \"omit_\" + name if key in", "com_google_guava() if should_create_repository(\"com_squareup_okhttp3_okhttp\", kwargs): com_squareup_okhttp3_okhttp() if should_create_repository(\"com_squareup_okio\", kwargs): com_squareup_okio() if", "java_import_external( name = \"commons_codec\", jar_sha256 = \"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d\", jar_urls = [", "\"com_google_errorprone_error_prone_annotations\", jar_sha256 = \"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", \"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", ],", "args): \"\"\"Returns whether the name repository should be created. 
This", "= \"context-1.1.1\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\", \"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\", ], ) def", "= \"0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3\", strip_prefix = \"mux-1.6.2\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\", \"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\",", "external repositories required by Webtesting Rules. This function exists for", "[ \"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\", \"https://github.com/blang/semver/archive/v3.5.1.tar.gz\", ], ) def com_github_gorilla_context(): go_repository( name =", "\"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", ], ) def org_seleniumhq_py(): http_archive( name = \"org_seleniumhq_py\",", "# BSD 3-clause (maybe more?) 
amd64_sha256 = \"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\", amd64_urls =", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", \"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", ], licenses = [\"notice\"], # The", "Version 2.0 ) def org_json(): java_import_external( name = \"org_json\", jar_sha256", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", \"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", ], licenses = [\"notice\"], # New BSD", "= \"com_google_errorprone_error_prone_annotations\", jar_sha256 = \"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", \"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\",", "should_create_repository(name, args): \"\"\"Returns whether the name repository should be created.", "This will check to see if a repository has been", "net_bytebuddy() if should_create_repository(\"org_apache_commons_exec\", kwargs): org_apache_commons_exec() if should_create_repository(\"org_apache_httpcomponents_httpclient\", kwargs): org_apache_httpcomponents_httpclient() if", "= False, chromium = False, sauce = False): \"\"\"Sets up", "//browsers:chromium-native. sauce: Configure repositories for //browser/sauce:chrome-win10. 
\"\"\" if chromium: org_chromium_chromedriver()", "Apache Software License, Version 2.0 ) def junit(): java_import_external( name", "kwargs): org_seleniumhq_selenium_api() if should_create_repository(\"org_seleniumhq_selenium_remote_driver\", kwargs): org_seleniumhq_selenium_remote_driver() if kwargs.keys(): print(\"The following", "org_seleniumhq_selenium_remote_driver() if kwargs.keys(): print(\"The following parameters are unknown: \" +", "should_create_repository(\"commons_codec\", kwargs): commons_codec() if should_create_repository(\"commons_logging\", kwargs): commons_logging() if should_create_repository(\"junit\", kwargs):", "\"github.com/gorilla/context\", sha256 = \"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\", strip_prefix = \"context-1.1.1\", urls = [", "# Apache 2.0 deps = [ \"@com_squareup_okio\", \"@com_google_code_findbugs_jsr305\", ], )", "Software License, Version 2.0 ) def junit(): java_import_external( name =", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", \"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", ], licenses = [\"notice\"], #", "def com_google_guava(): java_import_external( name = \"com_google_guava\", jar_sha256 = \"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\", jar_urls", "project is depended upon (e.g. 
rules_closure) that defines the same", "\"org_seleniumhq_py\", build_file = str(Label(\"//build_files:org_seleniumhq_py.BUILD\")), sha256 = \"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\", strip_prefix = \"selenium-3.14.0\",", "should_create_repository(\"org_seleniumhq_selenium_api\", kwargs): org_seleniumhq_selenium_api() if should_create_repository(\"org_seleniumhq_selenium_remote_driver\", kwargs): org_seleniumhq_selenium_remote_driver() if kwargs.keys(): print(\"The", "\"@commons_logging\", \"@commons_codec\", ], ) def org_apache_httpcomponents_httpcore(): java_import_external( name = \"org_apache_httpcomponents_httpcore\",", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", ], licenses = [\"notice\"], # The", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\", \"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\", ], licenses = [\"notice\"], #", "def org_apache_httpcomponents_httpcore(): java_import_external( name = \"org_apache_httpcomponents_httpcore\", jar_sha256 = \"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\", jar_urls", "kwargs): org_json() if should_create_repository(\"org_seleniumhq_py\", kwargs): org_seleniumhq_py() if should_create_repository(\"org_seleniumhq_selenium_api\", kwargs): org_seleniumhq_selenium_api()", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "http_archive. 
This function makes it easy to import these transitive", "windows_sha256 = \"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\", windows_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\", ], ) def", "if should_create_repository(\"com_google_code_findbugs_jsr305\", kwargs): com_google_code_findbugs_jsr305() if should_create_repository(\"com_google_code_gson\", kwargs): com_google_code_gson() if should_create_repository(", "exports = [ \"@com_google_code_findbugs_jsr305\", \"@com_google_errorprone_error_prone_annotations\", ], ) def com_saucelabs_sauce_connect(): platform_http_file(", "= [\"by_exception_only\"], # SauceLabs EULA amd64_sha256 = \"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\", amd64_urls =", "urls = [ \"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\", ], ) def org_seleniumhq_selenium_api(): java_import_external( name", "not actually downloaded, unless a target is built that depends", "language governing permissions and # limitations under the License. \"\"\"Defines", "= 1, ) def org_seleniumhq_selenium_remote_driver(): java_import_external( name = \"org_seleniumhq_selenium_remote_driver\", jar_sha256", "= \"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", \"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", ], licenses =", "http_archive( name = \"bazel_skylib\", sha256 = \"\", strip_prefix = \"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\",", "by Webtesting Rules. 
This function exists for other Bazel projects", "# MPL 2.0 amd64_sha256 = \"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\", amd64_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\",", "web_test_repositories(**kwargs): \"\"\"Defines external repositories required by Webtesting Rules. This function", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "= \"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\", \"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\", ], licenses =", "Apache License, Version 2.0 ) def org_apache_httpcomponents_httpclient(): java_import_external( name =", "the License. \"\"\"Defines external repositories needed by rules_webtesting.\"\"\" load(\"//web/internal:platform_http_file.bzl\", \"platform_http_file\")", "], ) def com_saucelabs_sauce_connect(): platform_http_file( name = \"com_saucelabs_sauce_connect\", licenses =", "[\"reciprocal\"], # MPL 2.0 amd64_sha256 = \"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\", amd64_urls = [", "\"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\", \"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\", ], licenses = [\"notice\"], # Apache 2.0 exports", "org_apache_httpcomponents_httpclient() if should_create_repository(\"org_apache_httpcomponents_httpcore\", kwargs): org_apache_httpcomponents_httpcore() if should_create_repository(\"org_hamcrest_core\", kwargs): org_hamcrest_core() if", "useful for users who want to be rigorous about declaring", "go_repository( name = \"com_github_tebeka_selenium\", importpath = \"github.com/tebeka/selenium\", sha256 = \"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\",", "This 
should only be used on an experimental basis; projects", "\"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", ], ) def com_github_blang_semver(): go_repository( name = \"com_github_blang_semver\", importpath", "dependencies, or when another Bazel project is depended upon (e.g.", "macos_sha256 = \"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\", macos_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", ], )", "\"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\", \"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\", ], ) def com_github_gorilla_mux(): go_repository( name = \"com_github_gorilla_mux\",", "], ) def org_seleniumhq_py(): http_archive( name = \"org_seleniumhq_py\", build_file =", "is depended upon (e.g. rules_closure) that defines the same dependencies", "= \"com_github_urllib3\", build_file = str(Label(\"//build_files:com_github_urllib3.BUILD\")), sha256 = \"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\", strip_prefix =", "\"\"\" key = \"omit_\" + name if key in args:", "], macos_sha256 = \"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\", macos_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\", ], windows_sha256", ") def org_hamcrest_core(): java_import_external( name = \"org_hamcrest_core\", jar_sha256 = \"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\",", "org_chromium_chromium(): platform_http_file( name = \"org_chromium_chromium\", licenses = [\"notice\"], # BSD", "name parameter. 
This is useful for users who want to", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", \"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", ], licenses = [\"notice\"], # The Apache", "# New BSD License testonly_ = 1, ) def org_jetbrains_kotlin_stdlib():", "jar_sha256 = \"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", \"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", ], licenses", "\"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", \"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", ], licenses = [\"reciprocal\"], # Eclipse Public License", "rigorous about declaring their own direct dependencies, or when another", "if should_create_repository(\"bazel_skylib\", kwargs): bazel_skylib() if should_create_repository(\"com_github_blang_semver\", kwargs): com_github_blang_semver() if should_create_repository(\"com_github_gorilla_context\",", "com_google_code_findbugs_jsr305(): java_import_external( name = \"com_google_code_findbugs_jsr305\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", \"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\",", "[\"notice\"], # Apache 2.0 deps = [ \"@com_squareup_okio\", \"@com_google_code_findbugs_jsr305\", ],", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", ], licenses = [\"notice\"], # The Apache", "defining a rule for the repository. 
The args dict will", "com_github_urllib3(): http_archive( name = \"com_github_urllib3\", build_file = str(Label(\"//build_files:com_github_urllib3.BUILD\")), sha256 =", "\"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", \"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", ], licenses = [\"notice\"], # The Apache Software", "= [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", ], ) def org_mozilla_geckodriver(): platform_http_file( name", "use this file except in compliance with the License. #", "= \"com_squareup_okio\", jar_sha256 = \"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", \"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\",", "\"java_import_external\") # NOTE: URLs are mirrored by an asynchronous review", "Bazel project is depended upon (e.g. rules_closure) that defines the", "org_apache_httpcomponents_httpclient(): java_import_external( name = \"org_apache_httpcomponents_httpclient\", jar_sha256 = \"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\", jar_urls =", "license ) def org_mozilla_firefox(): platform_http_file( name = \"org_mozilla_firefox\", licenses =", "be excluded with an \"omit_\" + name parameter. 
This is", "[\"notice\"], # Apache 2.0 deps = [\"@com_google_code_findbugs_jsr305\"], ) def org_apache_commons_exec():", "[\"notice\"], # The Apache Software License, Version 2.0 ) def", "urls = [ \"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", \"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", ], ) def com_github_blang_semver(): go_repository(", "an \"omit_\" _+ name parameter or by previously defining a", "that contains \"omit_...\": bool pairs. Returns: boolean indicating whether the", "# The Apache Software License, Version 2.0 ) def com_google_errorprone_error_prone_annotations():", "License. \"\"\"Defines external repositories needed by rules_webtesting.\"\"\" load(\"//web/internal:platform_http_file.bzl\", \"platform_http_file\") load(\"@bazel_gazelle//:deps.bzl\",", "sha256 = \"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\", strip_prefix = \"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\", urls = [ \"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\",", "strip_prefix = \"selenium-3.14.0\", urls = [ \"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz\", ], ) def", "], licenses = [\"notice\"], # Apache License, Version 2.0 deps", "2.0, BSD 2-clause, MIT amd64_sha256 = \"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\", amd64_urls = [", "\"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", ], licenses = [\"notice\"], # Apache 2.0 deps =", "org_hamcrest_core(): java_import_external( name = \"org_hamcrest_core\", jar_sha256 = \"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\", jar_urls =", "in compliance with the License. 
# You may obtain a", "kwargs): net_bytebuddy() if should_create_repository(\"org_apache_commons_exec\", kwargs): org_apache_commons_exec() if should_create_repository(\"org_apache_httpcomponents_httpclient\", kwargs): org_apache_httpcomponents_httpclient()", "software # distributed under the License is distributed on an", "= \"bazel_skylib\", sha256 = \"\", strip_prefix = \"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\", urls =", "they are not actually downloaded, unless a target is built", "], licenses = [\"notice\"], # MIT-style license ) def org_mozilla_firefox():", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", ], licenses = [\"notice\"], #", "will be mutated to remove \"omit_\" + name. Args: name:", "Free License v. 2.0, BSD 2-clause, MIT amd64_sha256 = \"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\",", "\"omit_\" + name. 
Args: name: The name of the repository", "License, Version 2.0 ) def com_google_errorprone_error_prone_annotations(): java_import_external( name = \"com_google_errorprone_error_prone_annotations\",", "org_mozilla_geckodriver(): platform_http_file( name = \"org_mozilla_geckodriver\", licenses = [\"reciprocal\"], # MPL", "com_github_blang_semver() if should_create_repository(\"com_github_gorilla_context\", kwargs): com_github_gorilla_context() if should_create_repository(\"com_github_gorilla_mux\", kwargs): com_github_gorilla_mux() if", "\"org_apache_commons_exec\", jar_sha256 = \"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", \"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", ],", "[\"@org_hamcrest_core\"], ) def net_bytebuddy(): java_import_external( name = \"net_bytebuddy\", jar_sha256 =", "sauce: Configure repositories for //browser/sauce:chrome-win10. \"\"\" if chromium: org_chromium_chromedriver() org_chromium_chromium()", "kwargs): org_jetbrains_kotlin_stdlib() if should_create_repository(\"org_json\", kwargs): org_json() if should_create_repository(\"org_seleniumhq_py\", kwargs): org_seleniumhq_py()", "a target is built that depends on them. 
Args: **kwargs:", "licenses = [\"notice\"], # Apache 2.0 deps = [ \"@com_google_code_findbugs_jsr305\",", "MIT amd64_sha256 = \"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\", amd64_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\", ], macos_sha256", "urls = [ \"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\", \"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\", ], ) def com_github_gorilla_mux(): go_repository(", "def browser_repositories(firefox = False, chromium = False, sauce = False):", ") def com_google_guava(): java_import_external( name = \"com_google_guava\", jar_sha256 = \"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\",", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", \"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", ], licenses = [\"notice\"], # Apache 2.0", "def com_google_code_gson(): java_import_external( name = \"com_google_code_gson\", jar_sha256 = \"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\", jar_urls", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", \"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", ], licenses = [\"notice\"], # The", "org_chromium_chromedriver(): platform_http_file( name = \"org_chromium_chromedriver\", licenses = [\"reciprocal\"], # BSD", "= \"org_chromium_chromium\", licenses = [\"notice\"], # BSD 3-clause (maybe more?)", "with the License. 
# You may obtain a copy of", "= \"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\", strip_prefix = \"selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a\", urls = [ \"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", \"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\",", "external repositories needed by rules_webtesting.\"\"\" load(\"//web/internal:platform_http_file.bzl\", \"platform_http_file\") load(\"@bazel_gazelle//:deps.bzl\", \"go_repository\") load(\"@bazel_tools//tools/build_defs/repo:http.bzl\",", "Apache License, Version 2.0 ) def commons_logging(): java_import_external( name =", "if sauce: com_saucelabs_sauce_connect() def bazel_skylib(): http_archive( name = \"bazel_skylib\", sha256", "\"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", \"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", ], ) def com_github_urllib3(): http_archive( name = \"com_github_urllib3\",", "upon (e.g. rules_closure) that defines the same dependencies as this", "= str(Label(\"//build_files:com_github_urllib3.BUILD\")), sha256 = \"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\", strip_prefix = \"urllib3-1.23\", urls =", "dependencies as this one (e.g. com_google_guava.) Alternatively, a whitelist model", "greppable for that to happen. 
It's OK to submit broken", "name = \"org_chromium_chromedriver\", licenses = [\"reciprocal\"], # BSD 3-clause, ICU,", "\"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", \"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", ], licenses = [\"notice\"],", "a whitelist model may be used by calling the individual", "= False, sauce = False): \"\"\"Sets up repositories for browsers", "amd64_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", ], macos_sha256 = \"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\", macos_urls", "if should_create_repository(\"com_github_gorilla_context\", kwargs): com_github_gorilla_context() if should_create_repository(\"com_github_gorilla_mux\", kwargs): com_github_gorilla_mux() if should_create_repository(\"com_github_tebeka_selenium\",", "2.0 ) def org_json(): java_import_external( name = \"org_json\", jar_sha256 =", "BSD 2-clause, MIT amd64_sha256 = \"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7\", amd64_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\",", "that should be checked. args: A dictionary that contains \"omit_...\":", "should_create_repository(\"com_squareup_okio\", kwargs): com_squareup_okio() if should_create_repository(\"commons_codec\", kwargs): commons_codec() if should_create_repository(\"commons_logging\", kwargs):", "express or implied. # See the License for the specific", "except in compliance with the License. # You may obtain", "be checked. 
args: A dictionary that contains \"omit_...\": bool pairs.", "if should_create_repository(\"com_github_tebeka_selenium\", kwargs): com_github_tebeka_selenium() if should_create_repository(\"com_github_urllib3\", kwargs): com_github_urllib3() if should_create_repository(\"com_google_code_findbugs_jsr305\",", "Version 2.0 ) def commons_logging(): java_import_external( name = \"commons_logging\", jar_sha256", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", \"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar\", ], licenses = [\"notice\"], # Apache", "# Apache License, Version 2.0 ) def org_apache_httpcomponents_httpclient(): java_import_external( name", "name = \"org_seleniumhq_py\", build_file = str(Label(\"//build_files:org_seleniumhq_py.BUILD\")), sha256 = \"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\", strip_prefix", "CONDITIONS OF ANY KIND, either express or implied. # See", "def org_hamcrest_core(): java_import_external( name = \"org_hamcrest_core\", jar_sha256 = \"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\", jar_urls", "\"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\", \"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar\", ], licenses = [\"notice\"],", "individual functions this method references. 
Please note that while these", "Version 2.0 deps = [ \"@org_apache_httpcomponents_httpcore\", \"@commons_logging\", \"@commons_codec\", ], )", "java_import_external( name = \"com_google_code_findbugs_jsr305\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", \"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", ],", "who want to be rigorous about declaring their own direct", "], ) def com_github_tebeka_selenium(): go_repository( name = \"com_github_tebeka_selenium\", importpath =", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", ], licenses = [\"notice\"], #", "\"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", \"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", ], licenses = [\"notice\"], # Apache 2.0 deps", "kwargs): com_github_gorilla_mux() if should_create_repository(\"com_github_tebeka_selenium\", kwargs): com_github_tebeka_selenium() if should_create_repository(\"com_github_urllib3\", kwargs): com_github_urllib3()", "\"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", ], licenses = [\"notice\"], # The Apache Software License,", "has been previously defined before defining a new repository. 
Alternatively,", "= \"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\", strip_prefix = \"urllib3-1.23\", urls = [ \"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\", ],", "name = \"com_google_guava\", jar_sha256 = \"a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar\",", "jar_sha256 = \"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", ], licenses", "com_github_blang_semver(): go_repository( name = \"com_github_blang_semver\", importpath = \"github.com/blang/semver\", sha256 =", "+ name parameter. This is useful for users who want", "build_file = str(Label(\"//build_files:org_seleniumhq_py.BUILD\")), sha256 = \"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40\", strip_prefix = \"selenium-3.14.0\", urls", "is built that depends on them. Args: **kwargs: omit_... 
parameters", ") def org_json(): java_import_external( name = \"org_json\", jar_sha256 = \"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\",", "submit broken mirror # URLs, so long as they're correctly", "[ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz\", ], ) def org_seleniumhq_py(): http_archive( name =", "License, Version 2.0 testonly_ = 1, ) def org_seleniumhq_selenium_remote_driver(): java_import_external(", "if should_create_repository(\"commons_logging\", kwargs): commons_logging() if should_create_repository(\"junit\", kwargs): junit() if should_create_repository(\"net_bytebuddy\",", "\"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", \"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", ], ) def com_github_blang_semver(): go_repository( name = \"com_github_blang_semver\",", "def org_apache_httpcomponents_httpclient(): java_import_external( name = \"org_apache_httpcomponents_httpclient\", jar_sha256 = \"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\", jar_urls", "WORKSPACE file when depending on rules_webtesting using http_archive. 
This function", "\"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", ], licenses = [\"notice\"],", "\"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar\", ], licenses = [\"reciprocal\"], # Eclipse Public License 1.0", "\"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", \"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", ], licenses = [\"notice\"],", "\"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\", ], windows_sha256 = \"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\", windows_urls = [ \"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\", ],", "\"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\", strip_prefix = \"semver-3.5.1\", urls = [ \"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\", \"https://github.com/blang/semver/archive/v3.5.1.tar.gz\", ],", "kwargs.keys(): print(\"The following parameters are unknown: \" + str(kwargs.keys())) def", "load(\"@bazel_tools//tools/build_defs/repo:java.bzl\", \"java_import_external\") # NOTE: URLs are mirrored by an asynchronous", "jar_sha256 = \"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", \"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", ], licenses", "= \"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\", jar_urls = [ 
\"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", \"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar\", ], licenses =", "Version 2.0 ) def junit(): java_import_external( name = \"junit\", jar_sha256", "\"\"\"Sets up repositories for browsers defined in //browsers/.... This should", "to prevent importing specific dependencies. \"\"\" if should_create_repository(\"bazel_skylib\", kwargs): bazel_skylib()", "java_import_external( name = \"org_hamcrest_core\", jar_sha256 = \"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9\", jar_urls = [", "amd64_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\", ], macos_sha256 = \"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\", macos_urls =", "\"1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", \"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar\", ], licenses = [\"notice\"],", "The args dict will be mutated to remove \"omit_\" +", "\"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", ], licenses = [\"notice\"],", "\"@commons_codec\", ], ) def org_apache_httpcomponents_httpcore(): java_import_external( name = \"org_apache_httpcomponents_httpcore\", jar_sha256", "2.0 deps = [\"@com_google_code_findbugs_jsr305\"], ) def org_apache_commons_exec(): java_import_external( name =", "True def browser_repositories(firefox = False, chromium = False, sauce =", "com_google_code_gson() if should_create_repository( \"com_google_errorprone_error_prone_annotations\", kwargs, 
): com_google_errorprone_error_prone_annotations() if should_create_repository(\"com_google_guava\", kwargs):", "def com_github_tebeka_selenium(): go_repository( name = \"com_github_tebeka_selenium\", importpath = \"github.com/tebeka/selenium\", sha256", ") def commons_logging(): java_import_external( name = \"commons_logging\", jar_sha256 = \"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\",", "= [\"reciprocal\"], # MPL 2.0 amd64_sha256 = \"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de\", amd64_urls =", "name = \"com_google_code_findbugs_jsr305\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", \"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", ], jar_sha256", "jar_sha256 = \"518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", \"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", ], licenses", "\"com_google_code_findbugs_jsr305\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", \"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", ], jar_sha256 = \"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\",", "java_import_external( name = \"org_jetbrains_kotlin_stdlib\", jar_sha256 = \"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\", jar_urls = [", "], ) def org_apache_httpcomponents_httpcore(): java_import_external( name = \"org_apache_httpcomponents_httpcore\", jar_sha256 =", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", \"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar\", ], licenses = [\"notice\"], # New", "Rules. 
This function exists for other Bazel projects to call", "= \"org_apache_commons_exec\", jar_sha256 = \"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", \"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\",", "[ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\", ], macos_sha256 = \"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\", macos_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip\",", "= \"com_google_code_findbugs_jsr305\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", \"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", ], jar_sha256 =", "= [ \"@com_google_code_gson\", \"@com_google_guava\", \"@net_bytebuddy\", \"@com_squareup_okhttp3_okhttp\", \"@com_squareup_okio\", \"@commons_codec\", \"@commons_logging\", \"@org_apache_commons_exec\",", "platform_http_file( name = \"org_mozilla_geckodriver\", licenses = [\"reciprocal\"], # MPL 2.0", "used by calling the individual functions this method references. Please", "2.0 testonly_ = 1, ) def org_seleniumhq_selenium_remote_driver(): java_import_external( name =", "if should_create_repository( \"com_google_errorprone_error_prone_annotations\", kwargs, ): com_google_errorprone_error_prone_annotations() if should_create_repository(\"com_google_guava\", kwargs): com_google_guava()", "licenses = [\"notice\"], # Apache 2.0 exports = [ \"@com_google_code_findbugs_jsr305\",", "if chromium: org_chromium_chromedriver() org_chromium_chromium() if firefox: org_mozilla_firefox() org_mozilla_geckodriver() if sauce:", "Software License, Version 2.0 ) def org_json(): java_import_external( name =", "Configure repositories for //browsers:chromium-native. 
sauce: Configure repositories for //browser/sauce:chrome-win10. \"\"\"", "], ) def org_seleniumhq_selenium_api(): java_import_external( name = \"org_seleniumhq_selenium_api\", jar_sha256 =", "required by Webtesting Rules. This function exists for other Bazel", "deps = [ \"@org_apache_httpcomponents_httpcore\", \"@commons_logging\", \"@commons_codec\", ], ) def org_apache_httpcomponents_httpcore():", "\"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar\", ], licenses = [\"notice\"], # MIT-style license ) def", "asynchronous review process. They must # be greppable for that", "This is useful for users who want to be rigorous", "name = \"com_github_gorilla_context\", importpath = \"github.com/gorilla/context\", sha256 = \"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\", strip_prefix", "will check to see if a repository has been previously", "= [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", ], licenses = [\"notice\"], # Apache", "\"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\", ], ) def com_github_tebeka_selenium(): go_repository( name = \"com_github_tebeka_selenium\", importpath", "windows_sha256 = \"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\", windows_urls = [ \"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\", ], ) def", "= [\"notice\"], # Apache License, Version 2.0 ) def commons_logging():", "com_github_urllib3() if should_create_repository(\"com_google_code_findbugs_jsr305\", kwargs): com_google_code_findbugs_jsr305() if should_create_repository(\"com_google_code_gson\", kwargs): com_google_code_gson() if", "see if a repository has been previously defined before defining", "java_import_external( name = \"net_bytebuddy\", jar_sha256 = \"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\", jar_urls = 
[", "\"https://github.com/blang/semver/archive/v3.5.1.tar.gz\", ], ) def com_github_gorilla_context(): go_repository( name = \"com_github_gorilla_context\", importpath", "= \"com_github_blang_semver\", importpath = \"github.com/blang/semver\", sha256 = \"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\", strip_prefix =", "should_create_repository(\"org_apache_httpcomponents_httpcore\", kwargs): org_apache_httpcomponents_httpcore() if should_create_repository(\"org_hamcrest_core\", kwargs): org_hamcrest_core() if should_create_repository(\"org_jetbrains_kotlin_stdlib\", kwargs):", "should_create_repository(\"com_github_gorilla_context\", kwargs): com_github_gorilla_context() if should_create_repository(\"com_github_gorilla_mux\", kwargs): com_github_gorilla_mux() if should_create_repository(\"com_github_tebeka_selenium\", kwargs):", "[ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip\", ], macos_sha256 = \"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\", macos_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\",", "kwargs): org_apache_httpcomponents_httpcore() if should_create_repository(\"org_hamcrest_core\", kwargs): org_hamcrest_core() if should_create_repository(\"org_jetbrains_kotlin_stdlib\", kwargs): org_jetbrains_kotlin_stdlib()", "def com_google_code_findbugs_jsr305(): java_import_external( name = \"com_google_code_findbugs_jsr305\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\",", "def com_github_gorilla_context(): go_repository( name = \"com_github_gorilla_context\", importpath = \"github.com/gorilla/context\", sha256", "Eclipse Public License 1.0 testonly_ = 1, deps = [\"@org_hamcrest_core\"],", ") def org_jetbrains_kotlin_stdlib(): java_import_external( name = \"org_jetbrains_kotlin_stdlib\", jar_sha256 = 
\"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\",", "a rule for the repository. The args dict will be", "\"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", ], macos_sha256 = \"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\", macos_urls = [ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg\",", "\"@com_squareup_okio\", \"@com_google_code_findbugs_jsr305\", ], ) def com_squareup_okio(): java_import_external( name = \"com_squareup_okio\",", "[ \"https://saucelabs.com/downloads/sc-4.5.1-osx.zip\", ], windows_sha256 = \"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5\", windows_urls = [ \"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\",", "], macos_sha256 = \"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32\", macos_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip\", ], windows_sha256", "The name of the repository that should be checked. 
args:", "users who want to be rigorous about declaring their own", "org_json() if should_create_repository(\"org_seleniumhq_py\", kwargs): org_seleniumhq_py() if should_create_repository(\"org_seleniumhq_selenium_api\", kwargs): org_seleniumhq_selenium_api() if", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "[ \"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", \"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2\", ], macos_sha256 = \"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5\", macos_urls = [", "com_github_gorilla_mux() if should_create_repository(\"com_github_tebeka_selenium\", kwargs): com_github_tebeka_selenium() if should_create_repository(\"com_github_urllib3\", kwargs): com_github_urllib3() if", "\"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\", amd64_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", ], macos_sha256 = \"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce\",", "sha256 = \"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\", strip_prefix = \"context-1.1.1\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\",", "built that depends on them. Args: **kwargs: omit_... parameters used", "= \"org_apache_httpcomponents_httpcore\", jar_sha256 = \"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar\",", "for browsers defined in //browsers/.... 
This should only be used", "[ \"@com_google_code_gson\", \"@com_google_guava\", \"@net_bytebuddy\", \"@com_squareup_okhttp3_okhttp\", \"@com_squareup_okio\", \"@commons_codec\", \"@commons_logging\", \"@org_apache_commons_exec\", \"@org_apache_httpcomponents_httpclient\",", "= [\"notice\"], # BSD 3-clause ) def com_google_code_gson(): java_import_external( name", "\"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", \"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar\", ], licenses = [\"notice\"],", "MPL 2.0 amd64_sha256 = \"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\", amd64_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\",", "kwargs, ): com_google_errorprone_error_prone_annotations() if should_create_repository(\"com_google_guava\", kwargs): com_google_guava() if should_create_repository(\"com_squareup_okhttp3_okhttp\", kwargs):", "jar_sha256 = \"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", \"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar\", ], licenses", "be created. 
This allows creation of a repository to be", "function exists for other Bazel projects to call from their", "urls = [ \"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", \"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz\", ], ) def com_github_urllib3(): http_archive(", "+ str(kwargs.keys())) def should_create_repository(name, args): \"\"\"Returns whether the name repository", "= \"com_github_tebeka_selenium\", importpath = \"github.com/tebeka/selenium\", sha256 = \"c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd\", strip_prefix =", "Version 2.0 (the \"License\"); # you may not use this", "\"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", \"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", ], licenses = [\"notice\"], # The Apache Software", "= \"mux-1.6.2\", urls = [ \"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz\", \"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz\", ], ) def", "repository that should be checked. args: A dictionary that contains", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", \"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar\", ], licenses = [\"notice\"], # The Apache", "for //browser/sauce:chrome-win10. \"\"\" if chromium: org_chromium_chromedriver() org_chromium_chromium() if firefox: org_mozilla_firefox()", "val = args.pop(key) if val: return False if native.existing_rule(name): return", "workspace. 
This will check to see if a repository has", ") def com_github_gorilla_context(): go_repository( name = \"com_github_gorilla_context\", importpath = \"github.com/gorilla/context\",", "Apache Software License, Version 2.0 ) def org_json(): java_import_external( name", "by applicable law or agreed to in writing, software #", "if kwargs.keys(): print(\"The following parameters are unknown: \" + str(kwargs.keys()))", "[ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", \"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", ], licenses = [\"notice\"], # The Apache", "], licenses = [\"reciprocal\"], # Eclipse Public License 1.0 testonly_", "\"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\", strip_prefix = \"urllib3-1.23\", urls = [ \"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\", ], )", "of a repository to be disabled by either an \"omit_\"", "[ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\", ], ) def org_hamcrest_core(): java_import_external( name = \"org_hamcrest_core\",", "Args: **kwargs: omit_... parameters used to prevent importing specific dependencies.", "kwargs): com_google_code_findbugs_jsr305() if should_create_repository(\"com_google_code_gson\", kwargs): com_google_code_gson() if should_create_repository( \"com_google_errorprone_error_prone_annotations\", kwargs,", "Academic Free License v. 2.0, BSD 2-clause, MIT amd64_sha256 =", "= \"github.com/gorilla/context\", sha256 = \"2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03\", strip_prefix = \"context-1.1.1\", urls =", "own browsers. Args: firefox: Configure repositories for //browsers:firefox-native. 
chromium: Configure", "if should_create_repository(\"com_squareup_okhttp3_okhttp\", kwargs): com_squareup_okhttp3_okhttp() if should_create_repository(\"com_squareup_okio\", kwargs): com_squareup_okio() if should_create_repository(\"commons_codec\",", "def com_google_errorprone_error_prone_annotations(): java_import_external( name = \"com_google_errorprone_error_prone_annotations\", jar_sha256 = \"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120\", jar_urls", "False): \"\"\"Sets up repositories for browsers defined in //browsers/.... This", "on an experimental basis; projects should define their own browsers.", "return False return True def browser_repositories(firefox = False, chromium =", "BSD License testonly_ = 1, ) def org_jetbrains_kotlin_stdlib(): java_import_external( name", "\"http_archive\") load(\"@bazel_tools//tools/build_defs/repo:java.bzl\", \"java_import_external\") # NOTE: URLs are mirrored by an", "\"net_bytebuddy\", jar_sha256 = \"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", \"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", ],", "//browsers/.... This should only be used on an experimental basis;", "strip_prefix = \"bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a\", urls = [ \"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", \"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz\", ], )", "sauce = False): \"\"\"Sets up repositories for browsers defined in", "dictionary that contains \"omit_...\": bool pairs. 
Returns: boolean indicating whether", "applicable law or agreed to in writing, software # distributed", "# Apache License, Version 2.0 ) def org_chromium_chromedriver(): platform_http_file( name", "\"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c\", windows_urls = [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\", ], ) def org_hamcrest_core(): java_import_external(", "BSD 3-clause (maybe more?) amd64_sha256 = \"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8\", amd64_urls = [", "\"org_jetbrains_kotlin_stdlib\", jar_sha256 = \"62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", \"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar\", ],", "or when another Bazel project is depended upon (e.g. rules_closure)", "= [ \"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz\", \"https://github.com/gorilla/context/archive/v1.1.1.tar.gz\", ], ) def com_github_gorilla_mux(): go_repository( name", "should_create_repository(\"com_github_urllib3\", kwargs): com_github_urllib3() if should_create_repository(\"com_google_code_findbugs_jsr305\", kwargs): com_google_code_findbugs_jsr305() if should_create_repository(\"com_google_code_gson\", kwargs):", "platform_http_file( name = \"com_saucelabs_sauce_connect\", licenses = [\"by_exception_only\"], # SauceLabs EULA", "2.0 deps = [ \"@com_squareup_okio\", \"@com_google_code_findbugs_jsr305\", ], ) def com_squareup_okio():", "jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", \"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar\", ], jar_sha256 = \"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\", licenses", 
"(e.g. com_google_guava.) Alternatively, a whitelist model may be used by", "disabled by either an \"omit_\" _+ name parameter or by", "def should_create_repository(name, args): \"\"\"Returns whether the name repository should be", "for the repository. The args dict will be mutated to", "= [\"notice\"], # Apache 2.0 deps = [\"@com_google_code_findbugs_jsr305\"], ) def", "# You may obtain a copy of the License at", "a repository has been previously defined before defining a new", "if should_create_repository(\"org_json\", kwargs): org_json() if should_create_repository(\"org_seleniumhq_py\", kwargs): org_seleniumhq_py() if should_create_repository(\"org_seleniumhq_selenium_api\",", "# Apache License, Version 2.0 ) def commons_logging(): java_import_external( name", "= [\"@com_google_code_findbugs_jsr305\"], ) def org_apache_commons_exec(): java_import_external( name = \"org_apache_commons_exec\", jar_sha256", "browsers defined in //browsers/.... This should only be used on", "name = \"org_seleniumhq_selenium_remote_driver\", jar_sha256 = \"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\",", "rules_webtesting using http_archive. This function makes it easy to import", "= \"net_bytebuddy\", jar_sha256 = \"4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", \"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\",", "= [\"notice\"], # Apache License, Version 2.0 ) def org_chromium_chromedriver():", "], licenses = [\"notice\"], # Apache 2.0 exports = [", "be mutated to remove \"omit_\" + name. 
Args: name: The", "amd64_urls = [ \"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip\", ], macos_sha256 = \"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae\", macos_urls =", "import these transitive dependencies into the parent workspace. This will", "happen. It's OK to submit broken mirror # URLs, so", "\"com_squareup_okio\", jar_sha256 = \"79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", \"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar\", ],", "if should_create_repository(\"org_seleniumhq_selenium_api\", kwargs): org_seleniumhq_selenium_api() if should_create_repository(\"org_seleniumhq_selenium_remote_driver\", kwargs): org_seleniumhq_selenium_remote_driver() if kwargs.keys():", "if should_create_repository(\"org_seleniumhq_py\", kwargs): org_seleniumhq_py() if should_create_repository(\"org_seleniumhq_selenium_api\", kwargs): org_seleniumhq_selenium_api() if should_create_repository(\"org_seleniumhq_selenium_remote_driver\",", "licenses = [\"reciprocal\"], # Eclipse Public License 1.0 testonly_ =", "sha256 = \"3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57\", strip_prefix = \"semver-3.5.1\", urls = [ \"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz\",", "experimental basis; projects should define their own browsers. Args: firefox:", "com_google_code_gson(): java_import_external( name = \"com_google_code_gson\", jar_sha256 = \"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81\", jar_urls =", "], licenses = [\"notice\"], # Apache 2.0 ) def com_google_guava():", "for that to happen. 
It's OK to submit broken mirror", "\"dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52\", amd64_urls = [ \"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz\", ], macos_sha256 = \"920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66\", macos_urls", ") def com_saucelabs_sauce_connect(): platform_http_file( name = \"com_saucelabs_sauce_connect\", licenses = [\"by_exception_only\"],", "= \"org_apache_httpcomponents_httpclient\", jar_sha256 = \"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7\", jar_urls = [ \"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\", \"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar\",", "key in args: val = args.pop(key) if val: return False", "This allows creation of a repository to be disabled by", "deps = [ \"@com_squareup_okio\", \"@com_google_code_findbugs_jsr305\", ], ) def com_squareup_okio(): java_import_external(", "if should_create_repository(\"org_apache_commons_exec\", kwargs): org_apache_commons_exec() if should_create_repository(\"org_apache_httpcomponents_httpclient\", kwargs): org_apache_httpcomponents_httpclient() if should_create_repository(\"org_apache_httpcomponents_httpcore\",", "\"License\"); # you may not use this file except in", "2.0 amd64_sha256 = \"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6\", amd64_urls = [ \"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", \"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz\", ],", "= [ \"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip\", ], ) def org_hamcrest_core(): java_import_external( name =", "\"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar\", ], 
licenses = [\"notice\"], # The Apache Software License,", ") def com_github_urllib3(): http_archive( name = \"com_github_urllib3\", build_file = str(Label(\"//build_files:com_github_urllib3.BUILD\")),", "\"https://saucelabs.com/downloads/sc-4.4.12-win32.zip\", ], ) def com_squareup_okhttp3_okhttp(): java_import_external( name = \"com_squareup_okhttp3_okhttp\", jar_urls", "commons_logging(): java_import_external( name = \"commons_logging\", jar_sha256 = \"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636\", jar_urls =", "], jar_sha256 = \"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7\", licenses = [\"notice\"], # BSD 3-clause", "def org_mozilla_firefox(): platform_http_file( name = \"org_mozilla_firefox\", licenses = [\"reciprocal\"], #", "needed by rules_webtesting.\"\"\" load(\"//web/internal:platform_http_file.bzl\", \"platform_http_file\") load(\"@bazel_gazelle//:deps.bzl\", \"go_repository\") load(\"@bazel_tools//tools/build_defs/repo:http.bzl\", \"http_archive\") load(\"@bazel_tools//tools/build_defs/repo:java.bzl\",", "dependencies may be excluded with an \"omit_\" + name parameter.", "], ) def org_hamcrest_core(): java_import_external( name = \"org_hamcrest_core\", jar_sha256 =", "if should_create_repository(\"com_google_guava\", kwargs): com_google_guava() if should_create_repository(\"com_squareup_okhttp3_okhttp\", kwargs): com_squareup_okhttp3_okhttp() if should_create_repository(\"com_squareup_okio\",", "permissions and # limitations under the License. 
\"\"\"Defines external repositories", "], ) def com_squareup_okhttp3_okhttp(): java_import_external( name = \"com_squareup_okhttp3_okhttp\", jar_urls =", "= [\"notice\"], # The Apache Software License, Version 2.0 )", "from their WORKSPACE file when depending on rules_webtesting using http_archive.", "sha256 = \"a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf\", strip_prefix = \"urllib3-1.23\", urls = [ \"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz\",", "# Apache 2.0 ) def com_google_guava(): java_import_external( name = \"com_google_guava\",", "\"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", \"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar\", ], licenses = [\"notice\"], # Apache 2.0 deps" ]
[ "# if os.path.isfile(pickle_dir): # with open(pickle_dir, 'rb') as fp: #", "tf.train.Coordinator() } # task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs) CONFIG_DIR =", "threading import init_paths from models.sample_models import * target_tasks = \"autoencoder", "cfg[ 'model_path' ] ) ############## Start dataloading workers ############## data_prefetch_init_fn", "whether to use train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg ) RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time", "task == 'jigsaw': continue cfg['model_path'] = os.path.join( cfg['log_root'], task, 'model.permanent-ckpt'", "'data_idxs' ], m.decoder_output, m.total_loss] ) if task == 'segment2d' or", "multiprocessing import Pool import numpy as np import os import", "use_filename_queue=False ) # is_training determines whether to use train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables(", "parser.parse_args() idx_to_run = args.idx if idx_to_run == -1: pairs_to_run =", "== -1: pairs_to_run = pairs else: pairs_to_run = pairs[idx_to_run:idx_to_run+1] def", "seen.add return [x for x in seq if not (x", "def blockPrint(): sys.stdout = open(os.devnull, 'w') # Restore def enablePrint():", "continue cfg['model_path'] = os.path.join( cfg['log_root'], task, 'model.permanent-ckpt' ) print( cfg['model_path'])", "lower_dim.min()) / (lower_dim.max() - lower_dim.min()) x[i] = lower_dim predicted =", "task == 'segment25d': from sklearn.decomposition import PCA x = np.zeros((32,256,256,3),", ") # is_training determines whether to use train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs,", "1. else: ( input_batch, target_batch, mask_batch, data_idx, predicted, loss, )", "data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction import lib.data.load_ops as load_ops tf.logging.set_verbosity(tf.logging.ERROR) all_outputs =", "m.total_loss] ) mask_batch = 1. 
else: ( input_batch, target_batch, mask_batch,", "Single Task') parser.add_argument('--idx', dest='idx', help='Task to run', type=int) parser.add_argument('--hs', dest='hs',", "as load_ops tf.logging.set_verbosity(tf.logging.ERROR) all_outputs = {} pickle_dir = 'viz_output_single_task.pkl' import", "continue ############## Set Up Inputs ############## # tf.logging.set_verbosity( tf.logging.INFO )", "dest='idx', help='Task to run', type=int) parser.add_argument('--hs', dest='hs', help='Hidden size to", "import Pool import numpy as np import os import pdb", "print(\"Done: {}\".format(task)) # os.system(\"sudo cp {d} /home/ubuntu/s3/model_log\".format(d=pickle_dir)) ############## Reset graph", "models to run in parallel', type=int) parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out =", "def run_to_task(task_to): import general_utils from general_utils import RuntimeDeterminedEnviromentVars import models.architectures", "cfg['randomize'] = False root_dir = cfg['root_dir'] cfg['num_read_threads'] = 1 print(cfg['log_root'])", "m.decoder_output, m.total_loss] ) mask_batch = 1. 
else: ( input_batch, target_batch,", "training_runners['sess'].run( [ m.input_images, m.targets, model[ 'data_idxs' ], m.decoder_output, m.total_loss] )", "set() seen_add = seen.add return [x for x in seq", "else cfg['val_filenames'] cfg['train_filenames'] = split_file cfg['val_filenames'] = split_file cfg['test_filenames'] =", "Reset graph and paths ############## tf.reset_default_graph() training_runners['sess'].close() try: del sys.modules[", "task in all_outputs: print(\"{} already exists....\\n\\n\\n\".format(task)) continue print(\"Doing {task}\".format(task=task)) general_utils", "training_runners[ 'sess' ], training_runners[ 'coord' ] )) prefetch_threads.start() ############## Run", "{} s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir), shell=True) except: subprocess.call(\"sudo cp {} /home/ubuntu/s3/visualizations/\".format(pickle_dir), shell=True) return", "def remove_dups(seq): seen = set() seen_add = seen.add return [x", "RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time = time.time() # utils.print_start_info( cfg, inputs[ 'max_steps' ],", "use train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg ) RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time = time.time()", "fix_pose impainting_whole jigsaw keypoint2d keypoint3d non_fixated_pose point_match reshade rgb2depth rgb2mist", "list_of_tasks: if task in all_outputs: print(\"{} already exists....\\n\\n\\n\".format(task)) continue print(\"Doing", "'saver_op' ].restore( training_runners[ 'sess' ], cfg[ 'model_path' ] ) ##############", "blockPrint() def remove_dups(seq): seen = set() seen_add = seen.add return", "import init_paths from models.sample_models import * target_tasks = \"autoencoder colorization", "False parser = argparse.ArgumentParser(description='Viz Single Task') parser.add_argument('--idx', dest='idx', help='Task to", "'rb') as fp: all_outputs = pickle.load(fp) for task in list_of_tasks:", 
"with open(pickle_dir, 'rb') as fp: # all_outputs = pickle.load(fp) ##############", "is_training=False ) ############## Set Up Model ############## model = utils.setup_model(", "[ m.input_images, m.targets, model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) mask_batch", "pickle.dump(all_outputs, fp) try: subprocess.call(\"aws s3 cp {} s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir), shell=True) except:", "non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined", "].join() # if os.path.isfile(pickle_dir): # with open(pickle_dir, 'rb') as fp:", "= split_file cfg['val_filenames'] = split_file cfg['test_filenames'] = split_file cfg['num_epochs'] =", "range(predicted.shape[0]): embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64)) pca = PCA(n_components=3) pca.fit(embedding_flattened) lower_dim =", "blockPrint(): sys.stdout = open(os.devnull, 'w') # Restore def enablePrint(): sys.stdout", "in range(predicted.shape[0]): embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64)) pca = PCA(n_components=3) pca.fit(embedding_flattened) lower_dim", "= utils.get_data_prefetch_threads_init_fn( inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False ) prefetch_threads = threading.Thread(", "############## Set Up Inputs ############## # tf.logging.set_verbosity( tf.logging.INFO ) inputs", "Start dataloading workers ############## data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn( inputs, cfg, is_training=ON_TEST_SET,", "(lower_dim - lower_dim.min()) / (lower_dim.max() - lower_dim.min()) x[i] = lower_dim", "= 1. 
else: ( input_batch, target_batch, mask_batch, data_idx, predicted, loss,", "lib.data.load_ops as load_ops tf.logging.set_verbosity(tf.logging.ERROR) all_outputs = {} pickle_dir = 'viz_output_single_task.pkl'", "import pickle import subprocess import sys import tensorflow as tf", "None: continue ############## Set Up Inputs ############## # tf.logging.set_verbosity( tf.logging.INFO", "models.architectures as architectures from data.load_ops import resize_rescale_image import utils from", "seen or seen_add(x))] pairs = list(itertools.product(list_of_tasks, list_of_tasks)) args = parser.parse_args()", "as fp: # all_outputs = pickle.load(fp) ############## Store to dict", "predicted, loss, ) = training_runners['sess'].run( [ m.input_images, m.targets, model[ 'data_idxs'", "run', type=int) parser.add_argument('--hs', dest='hs', help='Hidden size to use', type=int) parser.add_argument('--n-parallel',", "# with open(pickle_dir, 'rb') as fp: # all_outputs = pickle.load(fp)", "= x ############## Clean Up ############## training_runners[ 'coord' ].request_stop() training_runners[", "target_batch, 'mask': mask_batch, 'data_idx':data_idx, 'output':predicted} all_outputs[task] = to_store print(\"Done: {}\".format(task))", "== 'segment25d': from sklearn.decomposition import PCA x = np.zeros((32,256,256,3), dtype='float')", "split_file = cfg['test_filenames'] if ON_TEST_SET else cfg['val_filenames'] cfg['train_filenames'] = split_file", "= split_file cfg['num_epochs'] = 1 cfg['randomize'] = False root_dir =", "training_runners['sess'].run( [ m.input_images, m.targets, m.masks, model[ 'data_idxs' ], m.decoder_output, m.total_loss]", "tensorflow as tf import tensorflow.contrib.slim as slim import threading import", "'/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ############## Load Configs ############## cfg = utils.load_config( CONFIG_DIR, nopause=True", "############## model = utils.setup_model( inputs, cfg, is_training=IN_TRAIN_MODE ) m =", "= 
ipython_std_out # Force Print def forcePrint(str): enablePrint() print(str) sys.stdout.flush()", "pairs = list(itertools.product(list_of_tasks, list_of_tasks)) args = parser.parse_args() idx_to_run = args.idx", "load_and_specify_preprocessors_for_representation_extraction import lib.data.load_ops as load_ops tf.logging.set_verbosity(tf.logging.ERROR) all_outputs = {} pickle_dir", "general_utils import RuntimeDeterminedEnviromentVars import models.architectures as architectures from data.load_ops import", "import * target_tasks = \"autoencoder colorization curvature denoise edge2d edge3d", "input_batch, target_batch, mask_batch, data_idx, predicted, loss, ) = training_runners['sess'].run( [", "dtype='float') for i in range(predicted.shape[0]): embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64)) pca =", "import tensorflow.contrib.slim as slim import threading import init_paths from models.sample_models", "############## tf.reset_default_graph() training_runners['sess'].close() try: del sys.modules[ 'config' ] except: pass", "m.input_images, m.targets, model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) mask_batch =", "target_tasks = \"autoencoder colorization curvature denoise edge2d edge3d ego_motion fix_pose", "'sess' ], cfg[ 'model_path' ] ) ############## Start dataloading workers", "open( pickle_dir, 'wb') as fp: pickle.dump(all_outputs, fp) try: subprocess.call(\"aws s3", "= os.path.join( cfg['log_root'], task, 'model.permanent-ckpt' ) print( cfg['model_path']) if cfg['model_path']", "target_tasks.split(\" \") ON_TEST_SET = True IN_TRAIN_MODE = False parser =", "all_outputs = {} pickle_dir = 'viz_output_single_task.pkl' import os if os.path.isfile(pickle_dir):", "model[ 'saver_op' ].restore( training_runners[ 'sess' ], cfg[ 'model_path' ] )", "import general_utils from general_utils import RuntimeDeterminedEnviromentVars import models.architectures as architectures", "Set Up Inputs ############## # tf.logging.set_verbosity( tf.logging.INFO ) 
inputs =", "import itertools import time from multiprocessing import Pool import numpy", "else: pairs_to_run = pairs[idx_to_run:idx_to_run+1] def run_to_task(task_to): import general_utils from general_utils", "workers ############## data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn( inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False )", "Inputs ############## # tf.logging.set_verbosity( tf.logging.INFO ) inputs = utils.setup_input( cfg,", "Disabe def blockPrint(): sys.stdout = open(os.devnull, 'w') # Restore def", "resize_rescale_image import utils from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction import lib.data.load_ops as", "prefetch_threads = threading.Thread( target=data_prefetch_init_fn, args=( training_runners[ 'sess' ], training_runners[ 'coord'", "'data_idxs' ], m.decoder_output, m.total_loss] ) mask_batch = 1. else: (", "pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim = (lower_dim - lower_dim.min()) / (lower_dim.max() - lower_dim.min())", "cfg['val_filenames'] = split_file cfg['test_filenames'] = split_file cfg['num_epochs'] = 1 cfg['randomize']", "m.masks, model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) if task ==", "list(itertools.product(list_of_tasks, list_of_tasks)) args = parser.parse_args() idx_to_run = args.idx if idx_to_run", "True IN_TRAIN_MODE = False parser = argparse.ArgumentParser(description='Viz Single Task') parser.add_argument('--idx',", "segment25d segment2d vanishing_point_well_defined segmentsemantic_rb class_selected class_1000\" list_of_tasks = target_tasks.split(\" \")", "predicted, loss, ) = training_runners['sess'].run( [ m.input_images, m.targets, m.masks, model[", "rgb2depth rgb2mist rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined segmentsemantic_rb class_selected class_1000\"", "training_runners[ 'coord' ].request_stop() training_runners[ 'coord' ].join() # if os.path.isfile(pickle_dir): #", "from 
general_utils import RuntimeDeterminedEnviromentVars import models.architectures as architectures from data.load_ops", "to run in parallel', type=int) parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out = sys.stdout", "size to use', type=int) parser.add_argument('--n-parallel', dest='n_parallel', help='Number of models to", "sys.stdout.flush() blockPrint() def remove_dups(seq): seen = set() seen_add = seen.add", "= args.idx if idx_to_run == -1: pairs_to_run = pairs else:", "architectures from data.load_ops import resize_rescale_image import utils from data.task_data_loading import", "run in parallel', type=int) parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out = sys.stdout #", "subprocess.call(\"sudo cp {} /home/ubuntu/s3/visualizations/\".format(pickle_dir), shell=True) return if __name__ == '__main__':", "Set Up Model ############## model = utils.setup_model( inputs, cfg, is_training=IN_TRAIN_MODE", "reshade rgb2depth rgb2mist rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined segmentsemantic_rb class_selected", "init_paths from models.sample_models import * target_tasks = \"autoencoder colorization curvature", "numpy as np import os import pdb import pickle import", "# utils.print_start_info( cfg, inputs[ 'max_steps' ], is_training=False ) ############## Set", "split_file cfg['val_filenames'] = split_file cfg['test_filenames'] = split_file cfg['num_epochs'] = 1", "import time from multiprocessing import Pool import numpy as np", "seen = set() seen_add = seen.add return [x for x", "'segment2d' or task == 'segment25d': from sklearn.decomposition import PCA x", "'sess' ], training_runners[ 'coord' ] )) prefetch_threads.start() ############## Run First", "enablePrint(): sys.stdout = ipython_std_out # Force Print def forcePrint(str): enablePrint()", "if task == 'segment2d' or task == 'segment25d': from sklearn.decomposition", "cfg['val_filenames'] 
cfg['train_filenames'] = split_file cfg['val_filenames'] = split_file cfg['test_filenames'] = split_file", "forcePrint(str): enablePrint() print(str) sys.stdout.flush() blockPrint() def remove_dups(seq): seen = set()", "if cfg['model_path'] is None: continue ############## Set Up Inputs ##############", "Batch ############## if not hasattr(m, 'masks'): ( input_batch, target_batch, data_idx,", "open( pickle_dir, 'rb') as fp: all_outputs = pickle.load(fp) for task", "* target_tasks = \"autoencoder colorization curvature denoise edge2d edge3d ego_motion", "= threading.Thread( target=data_prefetch_init_fn, args=( training_runners[ 'sess' ], training_runners[ 'coord' ]", "np.squeeze(predicted[i]).reshape((-1,64)) pca = PCA(n_components=3) pca.fit(embedding_flattened) lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim =", "if task in all_outputs: print(\"{} already exists....\\n\\n\\n\".format(task)) continue print(\"Doing {task}\".format(task=task))", "Print def forcePrint(str): enablePrint() print(str) sys.stdout.flush() blockPrint() def remove_dups(seq): seen", "load_ops tf.logging.set_verbosity(tf.logging.ERROR) all_outputs = {} pickle_dir = 'viz_output_single_task.pkl' import os", "mask_batch = 1. 
else: ( input_batch, target_batch, mask_batch, data_idx, predicted,", "sys.path = remove_dups(sys.path) print(\"FINISHED: {}\\n\\n\\n\\n\\n\\n\".format(task)) pickle_dir = 'viz_output_single_task.pkl' with open(", "print(\"FINISHED: {}\\n\\n\\n\\n\\n\\n\".format(task)) pickle_dir = 'viz_output_single_task.pkl' with open( pickle_dir, 'wb') as", "tf.logging.INFO ) inputs = utils.setup_input( cfg, is_training=ON_TEST_SET, use_filename_queue=False ) #", "'__main__': run_to_task(None) # with Pool(args.n_parallel) as p: # p.map(run_to_task, list_of_tasks)", "pickle_dir, 'wb') as fp: pickle.dump(all_outputs, fp) try: subprocess.call(\"aws s3 cp", "list_of_tasks = target_tasks.split(\" \") ON_TEST_SET = True IN_TRAIN_MODE = False", "tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out = sys.stdout # Disabe def blockPrint(): sys.stdout =", "'coord': tf.train.Coordinator() } # task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs) CONFIG_DIR", "import pdb import pickle import subprocess import sys import tensorflow", "{ 'input': input_batch, 'target': target_batch, 'mask': mask_batch, 'data_idx':data_idx, 'output':predicted} all_outputs[task]", "use', type=int) parser.add_argument('--n-parallel', dest='n_parallel', help='Number of models to run in", "Up Inputs ############## # tf.logging.set_verbosity( tf.logging.INFO ) inputs = utils.setup_input(", "training_runners[ 'coord' ] )) prefetch_threads.start() ############## Run First Batch ##############", "cp {} s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir), shell=True) except: subprocess.call(\"sudo cp {} /home/ubuntu/s3/visualizations/\".format(pickle_dir), shell=True)", "tf.logging.set_verbosity( tf.logging.INFO ) inputs = utils.setup_input( cfg, is_training=ON_TEST_SET, use_filename_queue=False )", "cfg['test_filenames'] if ON_TEST_SET else cfg['val_filenames'] cfg['train_filenames'] = split_file cfg['val_filenames'] =", "= list(itertools.product(list_of_tasks, list_of_tasks)) 
args = parser.parse_args() idx_to_run = args.idx if", "= 1 cfg['randomize'] = False root_dir = cfg['root_dir'] cfg['num_read_threads'] =", "cfg['num_epochs'] = 1 cfg['randomize'] = False root_dir = cfg['root_dir'] cfg['num_read_threads']", "all_outputs = pickle.load(fp) for task in list_of_tasks: if task in", "m.targets, model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) mask_batch = 1.", "model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) if task == 'segment2d'", "# all_outputs = pickle.load(fp) ############## Store to dict ############## to_store", "} # task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs) CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task)", "import os import pdb import pickle import subprocess import sys", "fp: all_outputs = pickle.load(fp) for task in list_of_tasks: if task", "= \"autoencoder colorization curvature denoise edge2d edge3d ego_motion fix_pose impainting_whole", "help='Number of models to run in parallel', type=int) parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR)", ") = training_runners['sess'].run( [ m.input_images, m.targets, m.masks, model[ 'data_idxs' ],", "to use train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg ) RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time =", "RuntimeDeterminedEnviromentVars import models.architectures as architectures from data.load_ops import resize_rescale_image import", "= time.time() # utils.print_start_info( cfg, inputs[ 'max_steps' ], is_training=False )", "else: ( input_batch, target_batch, mask_batch, data_idx, predicted, loss, ) =", "fp: pickle.dump(all_outputs, fp) try: subprocess.call(\"aws s3 cp {} s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir), shell=True)", "subprocess import sys import tensorflow as tf import tensorflow.contrib.slim as", "to dict ############## to_store = { 'input': input_batch, 'target': 
target_batch,", "pickle import subprocess import sys import tensorflow as tf import", "shell=True) return if __name__ == '__main__': run_to_task(None) # with Pool(args.n_parallel)", "hasattr(m, 'masks'): ( input_batch, target_batch, data_idx, predicted, loss, ) =", "= PCA(n_components=3) pca.fit(embedding_flattened) lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim = (lower_dim -", "type=int) parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out = sys.stdout # Disabe def blockPrint():", "not (x in seen or seen_add(x))] pairs = list(itertools.product(list_of_tasks, list_of_tasks))", "return [x for x in seq if not (x in", "segmentsemantic_rb class_selected class_1000\" list_of_tasks = target_tasks.split(\" \") ON_TEST_SET = True", "as architectures from data.load_ops import resize_rescale_image import utils from data.task_data_loading", ")) prefetch_threads.start() ############## Run First Batch ############## if not hasattr(m,", "mask_batch, data_idx, predicted, loss, ) = training_runners['sess'].run( [ m.input_images, m.targets,", "seen_add = seen.add return [x for x in seq if", "== '__main__': run_to_task(None) # with Pool(args.n_parallel) as p: # p.map(run_to_task,", "pca = PCA(n_components=3) pca.fit(embedding_flattened) lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim = (lower_dim", "= 'viz_output_single_task.pkl' import os if os.path.isfile(pickle_dir): with open( pickle_dir, 'rb')", "= seen.add return [x for x in seq if not", "embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64)) pca = PCA(n_components=3) pca.fit(embedding_flattened) lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1))", "slim import threading import init_paths from models.sample_models import * target_tasks", "parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out = sys.stdout # Disabe def blockPrint(): sys.stdout", "cfg, 
is_training=ON_TEST_SET, use_filename_queue=False ) # is_training determines whether to use", "############## Start dataloading workers ############## data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn( inputs, cfg,", "'sess': tf.InteractiveSession(), 'coord': tf.train.Coordinator() } # task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to,", "'coord' ].join() # if os.path.isfile(pickle_dir): # with open(pickle_dir, 'rb') as", "ipython_std_out # Force Print def forcePrint(str): enablePrint() print(str) sys.stdout.flush() blockPrint()", "cp {d} /home/ubuntu/s3/model_log\".format(d=pickle_dir)) ############## Reset graph and paths ############## tf.reset_default_graph()", "lower_dim predicted = x ############## Clean Up ############## training_runners[ 'coord'", "import RuntimeDeterminedEnviromentVars import models.architectures as architectures from data.load_ops import resize_rescale_image", "sys.stdout = open(os.devnull, 'w') # Restore def enablePrint(): sys.stdout =", "in all_outputs: print(\"{} already exists....\\n\\n\\n\".format(task)) continue print(\"Doing {task}\".format(task=task)) general_utils =", "= utils.load_config( CONFIG_DIR, nopause=True ) RuntimeDeterminedEnviromentVars.register_dict( cfg ) split_file =", "task in list_of_tasks: if task in all_outputs: print(\"{} already exists....\\n\\n\\n\".format(task))", "i in range(predicted.shape[0]): embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64)) pca = PCA(n_components=3) pca.fit(embedding_flattened)", "rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined segmentsemantic_rb class_selected class_1000\" list_of_tasks =", "remove_dups(sys.path) print(\"FINISHED: {}\\n\\n\\n\\n\\n\\n\".format(task)) pickle_dir = 'viz_output_single_task.pkl' with open( pickle_dir, 'wb')", "Run First Batch ############## if not hasattr(m, 'masks'): ( input_batch,", "args = parser.parse_args() idx_to_run = args.idx if idx_to_run == -1:", "'coord' ].request_stop() training_runners[ 
'coord' ].join() # if os.path.isfile(pickle_dir): # with", "{d} /home/ubuntu/s3/model_log\".format(d=pickle_dir)) ############## Reset graph and paths ############## tf.reset_default_graph() training_runners['sess'].close()", "cfg['num_read_threads'] = 1 print(cfg['log_root']) if task == 'jigsaw': continue cfg['model_path']", "'data_idx':data_idx, 'output':predicted} all_outputs[task] = to_store print(\"Done: {}\".format(task)) # os.system(\"sudo cp", "= {} pickle_dir = 'viz_output_single_task.pkl' import os if os.path.isfile(pickle_dir): with", "import argparse import importlib import itertools import time from multiprocessing", "task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs) CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ############## Load", "import subprocess import sys import tensorflow as tf import tensorflow.contrib.slim", "- lower_dim.min()) x[i] = lower_dim predicted = x ############## Clean", "is_training determines whether to use train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg )", "args.idx if idx_to_run == -1: pairs_to_run = pairs else: pairs_to_run", "open(os.devnull, 'w') # Restore def enablePrint(): sys.stdout = ipython_std_out #", "False root_dir = cfg['root_dir'] cfg['num_read_threads'] = 1 print(cfg['log_root']) if task", "Up ############## training_runners[ 'coord' ].request_stop() training_runners[ 'coord' ].join() # if", "], m.decoder_output, m.total_loss] ) if task == 'segment2d' or task", "'wb') as fp: pickle.dump(all_outputs, fp) try: subprocess.call(\"aws s3 cp {}", "(lower_dim.max() - lower_dim.min()) x[i] = lower_dim predicted = x ##############", "from __future__ import absolute_import, division, print_function import argparse import importlib", "utils.print_start_info( cfg, inputs[ 'max_steps' ], is_training=False ) ############## Set Up", "all_outputs = pickle.load(fp) ############## Store to dict ############## to_store =", 
"].request_stop() training_runners[ 'coord' ].join() # if os.path.isfile(pickle_dir): # with open(pickle_dir,", "open(pickle_dir, 'rb') as fp: # all_outputs = pickle.load(fp) ############## Store", "as slim import threading import init_paths from models.sample_models import *", ") print( cfg['model_path']) if cfg['model_path'] is None: continue ############## Set", "= pickle.load(fp) ############## Store to dict ############## to_store = {", "from models.sample_models import * target_tasks = \"autoencoder colorization curvature denoise", "print(\"Doing {task}\".format(task=task)) general_utils = importlib.reload(general_utils) tf.reset_default_graph() training_runners = { 'sess':", "help='Task to run', type=int) parser.add_argument('--hs', dest='hs', help='Hidden size to use',", "not hasattr(m, 'masks'): ( input_batch, target_batch, data_idx, predicted, loss, )", "Up Model ############## model = utils.setup_model( inputs, cfg, is_training=IN_TRAIN_MODE )", "] ) ############## Start dataloading workers ############## data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn(", "graph and paths ############## tf.reset_default_graph() training_runners['sess'].close() try: del sys.modules[ 'config'", "] model[ 'saver_op' ].restore( training_runners[ 'sess' ], cfg[ 'model_path' ]", "dest='n_parallel', help='Number of models to run in parallel', type=int) parser.set_defaults(n_parallel=1)", "pickle_dir = 'viz_output_single_task.pkl' import os if os.path.isfile(pickle_dir): with open( pickle_dir,", "1 cfg['randomize'] = False root_dir = cfg['root_dir'] cfg['num_read_threads'] = 1", "s3 cp {} s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir), shell=True) except: subprocess.call(\"sudo cp {} /home/ubuntu/s3/visualizations/\".format(pickle_dir),", "############## cfg = utils.load_config( CONFIG_DIR, nopause=True ) RuntimeDeterminedEnviromentVars.register_dict( cfg )", "( input_batch, target_batch, data_idx, predicted, loss, ) = 
training_runners['sess'].run( [", "from sklearn.decomposition import PCA x = np.zeros((32,256,256,3), dtype='float') for i", "x ############## Clean Up ############## training_runners[ 'coord' ].request_stop() training_runners[ 'coord'", "in list_of_tasks: if task in all_outputs: print(\"{} already exists....\\n\\n\\n\".format(task)) continue", "utils.load_config( CONFIG_DIR, nopause=True ) RuntimeDeterminedEnviromentVars.register_dict( cfg ) split_file = cfg['test_filenames']", ") inputs = utils.setup_input( cfg, is_training=ON_TEST_SET, use_filename_queue=False ) # is_training", "] except: pass sys.path = remove_dups(sys.path) print(\"FINISHED: {}\\n\\n\\n\\n\\n\\n\".format(task)) pickle_dir =", "import os if os.path.isfile(pickle_dir): with open( pickle_dir, 'rb') as fp:", "general_utils from general_utils import RuntimeDeterminedEnviromentVars import models.architectures as architectures from", "= cfg['test_filenames'] if ON_TEST_SET else cfg['val_filenames'] cfg['train_filenames'] = split_file cfg['val_filenames']", "{}\\n\\n\\n\\n\\n\\n\".format(task)) pickle_dir = 'viz_output_single_task.pkl' with open( pickle_dir, 'wb') as fp:", "cfg['model_path'] is None: continue ############## Set Up Inputs ############## #", "as fp: pickle.dump(all_outputs, fp) try: subprocess.call(\"aws s3 cp {} s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir),", "data_idx, predicted, loss, ) = training_runners['sess'].run( [ m.input_images, m.targets, model[", "= split_file cfg['test_filenames'] = split_file cfg['num_epochs'] = 1 cfg['randomize'] =", "inputs[ 'max_steps' ], is_training=False ) ############## Set Up Model ##############", "].restore( training_runners[ 'sess' ], cfg[ 'model_path' ] ) ############## Start", "PCA(n_components=3) pca.fit(embedding_flattened) lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim = (lower_dim - lower_dim.min())", "= set() seen_add = seen.add return [x for x in", "and paths ############## 
tf.reset_default_graph() training_runners['sess'].close() try: del sys.modules[ 'config' ]", "os.path.join( cfg['log_root'], task, 'model.permanent-ckpt' ) print( cfg['model_path']) if cfg['model_path'] is", "of models to run in parallel', type=int) parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out", "- lower_dim.min()) / (lower_dim.max() - lower_dim.min()) x[i] = lower_dim predicted", "pairs[idx_to_run:idx_to_run+1] def run_to_task(task_to): import general_utils from general_utils import RuntimeDeterminedEnviromentVars import", "data.load_ops import resize_rescale_image import utils from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction import", "s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir), shell=True) except: subprocess.call(\"sudo cp {} /home/ubuntu/s3/visualizations/\".format(pickle_dir), shell=True) return if", "if idx_to_run == -1: pairs_to_run = pairs else: pairs_to_run =", "RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg ) RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time = time.time() # utils.print_start_info(", "utils.get_data_prefetch_threads_init_fn( inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False ) prefetch_threads = threading.Thread( target=data_prefetch_init_fn,", "/home/ubuntu/s3/model_log\".format(d=pickle_dir)) ############## Reset graph and paths ############## tf.reset_default_graph() training_runners['sess'].close() try:", "cfg['train_filenames'] = split_file cfg['val_filenames'] = split_file cfg['test_filenames'] = split_file cfg['num_epochs']", "x = np.zeros((32,256,256,3), dtype='float') for i in range(predicted.shape[0]): embedding_flattened =", "sys.modules[ 'config' ] except: pass sys.path = remove_dups(sys.path) print(\"FINISHED: {}\\n\\n\\n\\n\\n\\n\".format(task))", "'w') # Restore def enablePrint(): sys.stdout = ipython_std_out # Force", "room_layout segment25d 
segment2d vanishing_point_well_defined segmentsemantic_rb class_selected class_1000\" list_of_tasks = target_tasks.split(\"", "cfg['root_dir'] cfg['num_read_threads'] = 1 print(cfg['log_root']) if task == 'jigsaw': continue", "Store to dict ############## to_store = { 'input': input_batch, 'target':", "sys import tensorflow as tf import tensorflow.contrib.slim as slim import", "in seen or seen_add(x))] pairs = list(itertools.product(list_of_tasks, list_of_tasks)) args =", "tf.InteractiveSession(), 'coord': tf.train.Coordinator() } # task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs)", "division, print_function import argparse import importlib import itertools import time", "return if __name__ == '__main__': run_to_task(None) # with Pool(args.n_parallel) as", "utils from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction import lib.data.load_ops as load_ops tf.logging.set_verbosity(tf.logging.ERROR)", "seq if not (x in seen or seen_add(x))] pairs =", "], m.decoder_output, m.total_loss] ) mask_batch = 1. 
else: ( input_batch,", "input_batch, target_batch, data_idx, predicted, loss, ) = training_runners['sess'].run( [ m.input_images,", "/home/ubuntu/s3/visualizations/\".format(pickle_dir), shell=True) return if __name__ == '__main__': run_to_task(None) # with", "= sys.stdout # Disabe def blockPrint(): sys.stdout = open(os.devnull, 'w')", "= utils.setup_input( cfg, is_training=ON_TEST_SET, use_filename_queue=False ) # is_training determines whether", "############## Set Up Model ############## model = utils.setup_model( inputs, cfg,", "############## # tf.logging.set_verbosity( tf.logging.INFO ) inputs = utils.setup_input( cfg, is_training=ON_TEST_SET,", "import tensorflow as tf import tensorflow.contrib.slim as slim import threading", "args=( training_runners[ 'sess' ], training_runners[ 'coord' ] )) prefetch_threads.start() ##############", "'target': target_batch, 'mask': mask_batch, 'data_idx':data_idx, 'output':predicted} all_outputs[task] = to_store print(\"Done:", "= training_runners['sess'].run( [ m.input_images, m.targets, m.masks, model[ 'data_idxs' ], m.decoder_output,", "import numpy as np import os import pdb import pickle", "remove_dups(seq): seen = set() seen_add = seen.add return [x for", "os.system(\"sudo cp {d} /home/ubuntu/s3/model_log\".format(d=pickle_dir)) ############## Reset graph and paths ##############", "utils.setup_input( cfg, is_training=ON_TEST_SET, use_filename_queue=False ) # is_training determines whether to", "Clean Up ############## training_runners[ 'coord' ].request_stop() training_runners[ 'coord' ].join() #", "'input': input_batch, 'target': target_batch, 'mask': mask_batch, 'data_idx':data_idx, 'output':predicted} all_outputs[task] =", "dict ############## to_store = { 'input': input_batch, 'target': target_batch, 'mask':", "curvature denoise edge2d edge3d ego_motion fix_pose impainting_whole jigsaw keypoint2d keypoint3d", ") mask_batch = 1. 
else: ( input_batch, target_batch, mask_batch, data_idx,", "= importlib.reload(general_utils) tf.reset_default_graph() training_runners = { 'sess': tf.InteractiveSession(), 'coord': tf.train.Coordinator()", "pdb import pickle import subprocess import sys import tensorflow as", "run_to_task(task_to): import general_utils from general_utils import RuntimeDeterminedEnviromentVars import models.architectures as", "try: del sys.modules[ 'config' ] except: pass sys.path = remove_dups(sys.path)", "rgb2mist rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined segmentsemantic_rb class_selected class_1000\" list_of_tasks", "class_selected class_1000\" list_of_tasks = target_tasks.split(\" \") ON_TEST_SET = True IN_TRAIN_MODE", "# os.system(\"sudo cp {d} /home/ubuntu/s3/model_log\".format(d=pickle_dir)) ############## Reset graph and paths", "training_runners['sess'].close() try: del sys.modules[ 'config' ] except: pass sys.path =", "'model_path' ] ) ############## Start dataloading workers ############## data_prefetch_init_fn =", "loss, ) = training_runners['sess'].run( [ m.input_images, m.targets, m.masks, model[ 'data_idxs'", ") = training_runners['sess'].run( [ m.input_images, m.targets, model[ 'data_idxs' ], m.decoder_output,", "parser.add_argument('--idx', dest='idx', help='Task to run', type=int) parser.add_argument('--hs', dest='hs', help='Hidden size", "in parallel', type=int) parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out = sys.stdout # Disabe", "= cfg['root_dir'] cfg['num_read_threads'] = 1 print(cfg['log_root']) if task == 'jigsaw':", "del sys.modules[ 'config' ] except: pass sys.path = remove_dups(sys.path) print(\"FINISHED:", "shell=True) except: subprocess.call(\"sudo cp {} /home/ubuntu/s3/visualizations/\".format(pickle_dir), shell=True) return if __name__", "# tf.logging.set_verbosity( tf.logging.INFO ) inputs = utils.setup_input( cfg, is_training=ON_TEST_SET, use_filename_queue=False", 
"all_outputs[task] = to_store print(\"Done: {}\".format(task)) # os.system(\"sudo cp {d} /home/ubuntu/s3/model_log\".format(d=pickle_dir))", "{task}\".format(task=task)) general_utils = importlib.reload(general_utils) tf.reset_default_graph() training_runners = { 'sess': tf.InteractiveSession(),", "training_runners[ 'coord' ].join() # if os.path.isfile(pickle_dir): # with open(pickle_dir, 'rb')", "############## Run First Batch ############## if not hasattr(m, 'masks'): (", "use_filename_queue=False ) prefetch_threads = threading.Thread( target=data_prefetch_init_fn, args=( training_runners[ 'sess' ],", "'model.permanent-ckpt' ) print( cfg['model_path']) if cfg['model_path'] is None: continue ##############", ") m = model[ 'model' ] model[ 'saver_op' ].restore( training_runners[", "-1: pairs_to_run = pairs else: pairs_to_run = pairs[idx_to_run:idx_to_run+1] def run_to_task(task_to):", "= parser.parse_args() idx_to_run = args.idx if idx_to_run == -1: pairs_to_run", "tf.reset_default_graph() training_runners['sess'].close() try: del sys.modules[ 'config' ] except: pass sys.path", "= '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs) CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ############## Load Configs", "IN_TRAIN_MODE = False parser = argparse.ArgumentParser(description='Viz Single Task') parser.add_argument('--idx', dest='idx',", "RuntimeDeterminedEnviromentVars.register_dict( cfg ) split_file = cfg['test_filenames'] if ON_TEST_SET else cfg['val_filenames']", "= 'viz_output_single_task.pkl' with open( pickle_dir, 'wb') as fp: pickle.dump(all_outputs, fp)", "as fp: all_outputs = pickle.load(fp) for task in list_of_tasks: if", "in seq if not (x in seen or seen_add(x))] pairs", "if __name__ == '__main__': run_to_task(None) # with Pool(args.n_parallel) as p:", "type=int) parser.add_argument('--n-parallel', dest='n_parallel', help='Number of models to run in parallel',", "Configs ############## cfg = 
utils.load_config( CONFIG_DIR, nopause=True ) RuntimeDeterminedEnviromentVars.register_dict( cfg", "itertools import time from multiprocessing import Pool import numpy as", "argparse.ArgumentParser(description='Viz Single Task') parser.add_argument('--idx', dest='idx', help='Task to run', type=int) parser.add_argument('--hs',", "if not (x in seen or seen_add(x))] pairs = list(itertools.product(list_of_tasks,", "for task in list_of_tasks: if task in all_outputs: print(\"{} already", "import utils from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction import lib.data.load_ops as load_ops", "= False parser = argparse.ArgumentParser(description='Viz Single Task') parser.add_argument('--idx', dest='idx', help='Task", "pass sys.path = remove_dups(sys.path) print(\"FINISHED: {}\\n\\n\\n\\n\\n\\n\".format(task)) pickle_dir = 'viz_output_single_task.pkl' with", "import importlib import itertools import time from multiprocessing import Pool", "= pairs[idx_to_run:idx_to_run+1] def run_to_task(task_to): import general_utils from general_utils import RuntimeDeterminedEnviromentVars", "############## to_store = { 'input': input_batch, 'target': target_batch, 'mask': mask_batch,", "except: pass sys.path = remove_dups(sys.path) print(\"FINISHED: {}\\n\\n\\n\\n\\n\\n\".format(task)) pickle_dir = 'viz_output_single_task.pkl'", "to_store = { 'input': input_batch, 'target': target_batch, 'mask': mask_batch, 'data_idx':data_idx,", "argparse import importlib import itertools import time from multiprocessing import", "root_dir = cfg['root_dir'] cfg['num_read_threads'] = 1 print(cfg['log_root']) if task ==", "<gh_stars>100-1000 from __future__ import absolute_import, division, print_function import argparse import", "time from multiprocessing import Pool import numpy as np import", "importlib import itertools import time from multiprocessing import Pool import", "or seen_add(x))] pairs = list(itertools.product(list_of_tasks, list_of_tasks)) args = 
parser.parse_args() idx_to_run", "keypoint2d keypoint3d non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm room_layout segment25d", "importlib.reload(general_utils) tf.reset_default_graph() training_runners = { 'sess': tf.InteractiveSession(), 'coord': tf.train.Coordinator() }", "], is_training=False ) ############## Set Up Model ############## model =", "model[ 'model' ] model[ 'saver_op' ].restore( training_runners[ 'sess' ], cfg[", "data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn( inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False ) prefetch_threads =", "x in seq if not (x in seen or seen_add(x))]", "idx_to_run == -1: pairs_to_run = pairs else: pairs_to_run = pairs[idx_to_run:idx_to_run+1]", ") if task == 'segment2d' or task == 'segment25d': from", "input_batch, 'target': target_batch, 'mask': mask_batch, 'data_idx':data_idx, 'output':predicted} all_outputs[task] = to_store", "os.path.isfile(pickle_dir): # with open(pickle_dir, 'rb') as fp: # all_outputs =", "cfg = utils.load_config( CONFIG_DIR, nopause=True ) RuntimeDeterminedEnviromentVars.register_dict( cfg ) split_file", "data_idx, predicted, loss, ) = training_runners['sess'].run( [ m.input_images, m.targets, m.masks,", "target=data_prefetch_init_fn, args=( training_runners[ 'sess' ], training_runners[ 'coord' ] )) prefetch_threads.start()", "training_runners[ 'sess' ], cfg[ 'model_path' ] ) ############## Start dataloading", "= True IN_TRAIN_MODE = False parser = argparse.ArgumentParser(description='Viz Single Task')", "1 print(cfg['log_root']) if task == 'jigsaw': continue cfg['model_path'] = os.path.join(", "fp: # all_outputs = pickle.load(fp) ############## Store to dict ##############", "dataloading workers ############## data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn( inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False", "import threading import init_paths from models.sample_models import * target_tasks =", "impainting_whole 
jigsaw keypoint2d keypoint3d non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm", "'masks'): ( input_batch, target_batch, data_idx, predicted, loss, ) = training_runners['sess'].run(", "= (lower_dim - lower_dim.min()) / (lower_dim.max() - lower_dim.min()) x[i] =", "general_utils = importlib.reload(general_utils) tf.reset_default_graph() training_runners = { 'sess': tf.InteractiveSession(), 'coord':", "'viz_output_single_task.pkl' import os if os.path.isfile(pickle_dir): with open( pickle_dir, 'rb') as", "def enablePrint(): sys.stdout = ipython_std_out # Force Print def forcePrint(str):", "parser = argparse.ArgumentParser(description='Viz Single Task') parser.add_argument('--idx', dest='idx', help='Task to run',", "== 'jigsaw': continue cfg['model_path'] = os.path.join( cfg['log_root'], task, 'model.permanent-ckpt' )", "CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ############## Load Configs ############## cfg = utils.load_config(", "edge2d edge3d ego_motion fix_pose impainting_whole jigsaw keypoint2d keypoint3d non_fixated_pose point_match", "nopause=True ) RuntimeDeterminedEnviromentVars.register_dict( cfg ) split_file = cfg['test_filenames'] if ON_TEST_SET", "model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) mask_batch = 1. 
else:", "= { 'sess': tf.InteractiveSession(), 'coord': tf.train.Coordinator() } # task =", "prefetch_threads.start() ############## Run First Batch ############## if not hasattr(m, 'masks'):", "t=task_to, hs=args.hs) CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ############## Load Configs ############## cfg", "\"autoencoder colorization curvature denoise edge2d edge3d ego_motion fix_pose impainting_whole jigsaw", "import sys import tensorflow as tf import tensorflow.contrib.slim as slim", "sys.stdout = ipython_std_out # Force Print def forcePrint(str): enablePrint() print(str)", "colorization curvature denoise edge2d edge3d ego_motion fix_pose impainting_whole jigsaw keypoint2d", "= np.squeeze(predicted[i]).reshape((-1,64)) pca = PCA(n_components=3) pca.fit(embedding_flattened) lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim", "as np import os import pdb import pickle import subprocess", "jigsaw keypoint2d keypoint3d non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm room_layout", "keypoint3d non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm room_layout segment25d segment2d", "split_file cfg['num_epochs'] = 1 cfg['randomize'] = False root_dir = cfg['root_dir']", "to run', type=int) parser.add_argument('--hs', dest='hs', help='Hidden size to use', type=int)", "type=int) parser.add_argument('--hs', dest='hs', help='Hidden size to use', type=int) parser.add_argument('--n-parallel', dest='n_parallel',", "if os.path.isfile(pickle_dir): with open( pickle_dir, 'rb') as fp: all_outputs =", "'viz_output_single_task.pkl' with open( pickle_dir, 'wb') as fp: pickle.dump(all_outputs, fp) try:", "is_training=IN_TRAIN_MODE ) m = model[ 'model' ] model[ 'saver_op' ].restore(", "already exists....\\n\\n\\n\".format(task)) continue print(\"Doing {task}\".format(task=task)) general_utils = importlib.reload(general_utils) tf.reset_default_graph() training_runners", ") 
############## Start dataloading workers ############## data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn( inputs,", "os.path.isfile(pickle_dir): with open( pickle_dir, 'rb') as fp: all_outputs = pickle.load(fp)", "Force Print def forcePrint(str): enablePrint() print(str) sys.stdout.flush() blockPrint() def remove_dups(seq):", "subprocess.call(\"aws s3 cp {} s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir), shell=True) except: subprocess.call(\"sudo cp {}", "= '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ############## Load Configs ############## cfg = utils.load_config( CONFIG_DIR,", "= { 'input': input_batch, 'target': target_batch, 'mask': mask_batch, 'data_idx':data_idx, 'output':predicted}", "all_outputs: print(\"{} already exists....\\n\\n\\n\".format(task)) continue print(\"Doing {task}\".format(task=task)) general_utils = importlib.reload(general_utils)", ") ############## Set Up Model ############## model = utils.setup_model( inputs,", "inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False ) prefetch_threads = threading.Thread( target=data_prefetch_init_fn, args=(", "print(str) sys.stdout.flush() blockPrint() def remove_dups(seq): seen = set() seen_add =", "to_store print(\"Done: {}\".format(task)) # os.system(\"sudo cp {d} /home/ubuntu/s3/model_log\".format(d=pickle_dir)) ############## Reset", "import resize_rescale_image import utils from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction import lib.data.load_ops", "print( cfg['model_path']) if cfg['model_path'] is None: continue ############## Set Up", "Load Configs ############## cfg = utils.load_config( CONFIG_DIR, nopause=True ) RuntimeDeterminedEnviromentVars.register_dict(", "# task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs) CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ##############", "cfg ) 
RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time = time.time() # utils.print_start_info( cfg, inputs[", "__name__ == '__main__': run_to_task(None) # with Pool(args.n_parallel) as p: #", "{} pickle_dir = 'viz_output_single_task.pkl' import os if os.path.isfile(pickle_dir): with open(", "tensorflow.contrib.slim as slim import threading import init_paths from models.sample_models import", "lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim = (lower_dim - lower_dim.min()) / (lower_dim.max()", "'mask': mask_batch, 'data_idx':data_idx, 'output':predicted} all_outputs[task] = to_store print(\"Done: {}\".format(task)) #", "ON_TEST_SET else cfg['val_filenames'] cfg['train_filenames'] = split_file cfg['val_filenames'] = split_file cfg['test_filenames']", "'model' ] model[ 'saver_op' ].restore( training_runners[ 'sess' ], cfg[ 'model_path'", "help='Hidden size to use', type=int) parser.add_argument('--n-parallel', dest='n_parallel', help='Number of models", "task == 'segment2d' or task == 'segment25d': from sklearn.decomposition import", "target_batch, mask_batch, data_idx, predicted, loss, ) = training_runners['sess'].run( [ m.input_images,", "cfg['model_path'] = os.path.join( cfg['log_root'], task, 'model.permanent-ckpt' ) print( cfg['model_path']) if", "# Force Print def forcePrint(str): enablePrint() print(str) sys.stdout.flush() blockPrint() def", "model = utils.setup_model( inputs, cfg, is_training=IN_TRAIN_MODE ) m = model[", "############## Clean Up ############## training_runners[ 'coord' ].request_stop() training_runners[ 'coord' ].join()", "Restore def enablePrint(): sys.stdout = ipython_std_out # Force Print def", "cfg ) split_file = cfg['test_filenames'] if ON_TEST_SET else cfg['val_filenames'] cfg['train_filenames']", "= np.zeros((32,256,256,3), dtype='float') for i in range(predicted.shape[0]): embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64))", "start_time = time.time() # 
utils.print_start_info( cfg, inputs[ 'max_steps' ], is_training=False", "os if os.path.isfile(pickle_dir): with open( pickle_dir, 'rb') as fp: all_outputs", "threading.Thread( target=data_prefetch_init_fn, args=( training_runners[ 'sess' ], training_runners[ 'coord' ] ))", "def forcePrint(str): enablePrint() print(str) sys.stdout.flush() blockPrint() def remove_dups(seq): seen =", "np import os import pdb import pickle import subprocess import", "cfg, is_training=IN_TRAIN_MODE ) m = model[ 'model' ] model[ 'saver_op'", "parallel', type=int) parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out = sys.stdout # Disabe def", "import PCA x = np.zeros((32,256,256,3), dtype='float') for i in range(predicted.shape[0]):", "inputs, cfg, is_training=IN_TRAIN_MODE ) m = model[ 'model' ] model[", "m.decoder_output, m.total_loss] ) if task == 'segment2d' or task ==", "'{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs) CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ############## Load Configs ##############", "pairs_to_run = pairs else: pairs_to_run = pairs[idx_to_run:idx_to_run+1] def run_to_task(task_to): import", "is_training=ON_TEST_SET, use_filename_queue=False ) prefetch_threads = threading.Thread( target=data_prefetch_init_fn, args=( training_runners[ 'sess'", "pca.fit(embedding_flattened) lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim = (lower_dim - lower_dim.min()) /", "utils.setup_model( inputs, cfg, is_training=IN_TRAIN_MODE ) m = model[ 'model' ]", "############## data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn( inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False ) prefetch_threads", "except: subprocess.call(\"sudo cp {} /home/ubuntu/s3/visualizations/\".format(pickle_dir), shell=True) return if __name__ ==", "cfg, inputs[ 'max_steps' ], is_training=False ) ############## Set Up Model", "cp {} 
/home/ubuntu/s3/visualizations/\".format(pickle_dir), shell=True) return if __name__ == '__main__': run_to_task(None)", "tf.reset_default_graph() training_runners = { 'sess': tf.InteractiveSession(), 'coord': tf.train.Coordinator() } #", "parser.add_argument('--n-parallel', dest='n_parallel', help='Number of models to run in parallel', type=int)", "print(\"{} already exists....\\n\\n\\n\".format(task)) continue print(\"Doing {task}\".format(task=task)) general_utils = importlib.reload(general_utils) tf.reset_default_graph()", "############## Load Configs ############## cfg = utils.load_config( CONFIG_DIR, nopause=True )", "continue print(\"Doing {task}\".format(task=task)) general_utils = importlib.reload(general_utils) tf.reset_default_graph() training_runners = {", ") split_file = cfg['test_filenames'] if ON_TEST_SET else cfg['val_filenames'] cfg['train_filenames'] =", "tf.logging.set_verbosity(tf.logging.ERROR) all_outputs = {} pickle_dir = 'viz_output_single_task.pkl' import os if", "m.total_loss] ) if task == 'segment2d' or task == 'segment25d':", "for x in seq if not (x in seen or", "= 1 print(cfg['log_root']) if task == 'jigsaw': continue cfg['model_path'] =", "print(cfg['log_root']) if task == 'jigsaw': continue cfg['model_path'] = os.path.join( cfg['log_root'],", "idx_to_run = args.idx if idx_to_run == -1: pairs_to_run = pairs", "pickle.load(fp) for task in list_of_tasks: if task in all_outputs: print(\"{}", "cfg, is_training=ON_TEST_SET, use_filename_queue=False ) prefetch_threads = threading.Thread( target=data_prefetch_init_fn, args=( training_runners[", "'max_steps' ], is_training=False ) ############## Set Up Model ############## model", "models.sample_models import * target_tasks = \"autoencoder colorization curvature denoise edge2d", "print_function import argparse import importlib import itertools import time from", "denoise edge2d edge3d ego_motion fix_pose impainting_whole jigsaw keypoint2d keypoint3d non_fixated_pose", "segment2d 
vanishing_point_well_defined segmentsemantic_rb class_selected class_1000\" list_of_tasks = target_tasks.split(\" \") ON_TEST_SET", "Task') parser.add_argument('--idx', dest='idx', help='Task to run', type=int) parser.add_argument('--hs', dest='hs', help='Hidden", "= False root_dir = cfg['root_dir'] cfg['num_read_threads'] = 1 print(cfg['log_root']) if", "= training_runners['sess'].run( [ m.input_images, m.targets, model[ 'data_idxs' ], m.decoder_output, m.total_loss]", "lower_dim = (lower_dim - lower_dim.min()) / (lower_dim.max() - lower_dim.min()) x[i]", "import load_and_specify_preprocessors_for_representation_extraction import lib.data.load_ops as load_ops tf.logging.set_verbosity(tf.logging.ERROR) all_outputs = {}", ") RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time = time.time() # utils.print_start_info( cfg, inputs[ 'max_steps'", "cfg['test_filenames'] = split_file cfg['num_epochs'] = 1 cfg['randomize'] = False root_dir", "to use', type=int) parser.add_argument('--n-parallel', dest='n_parallel', help='Number of models to run", "(x in seen or seen_add(x))] pairs = list(itertools.product(list_of_tasks, list_of_tasks)) args", "from data.load_ops import resize_rescale_image import utils from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction", "[ m.input_images, m.targets, m.masks, model[ 'data_idxs' ], m.decoder_output, m.total_loss] )", "= open(os.devnull, 'w') # Restore def enablePrint(): sys.stdout = ipython_std_out", "{} /home/ubuntu/s3/visualizations/\".format(pickle_dir), shell=True) return if __name__ == '__main__': run_to_task(None) #", "predicted = x ############## Clean Up ############## training_runners[ 'coord' ].request_stop()", "pickle.load(fp) ############## Store to dict ############## to_store = { 'input':", "== 'segment2d' or task == 'segment25d': from sklearn.decomposition import PCA", "m.input_images, m.targets, m.masks, model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) 
if", "'output':predicted} all_outputs[task] = to_store print(\"Done: {}\".format(task)) # os.system(\"sudo cp {d}", "= to_store print(\"Done: {}\".format(task)) # os.system(\"sudo cp {d} /home/ubuntu/s3/model_log\".format(d=pickle_dir)) ##############", "= pickle.load(fp) for task in list_of_tasks: if task in all_outputs:", "as tf import tensorflow.contrib.slim as slim import threading import init_paths", "fp) try: subprocess.call(\"aws s3 cp {} s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir), shell=True) except: subprocess.call(\"sudo", "list_of_tasks)) args = parser.parse_args() idx_to_run = args.idx if idx_to_run ==", "Model ############## model = utils.setup_model( inputs, cfg, is_training=IN_TRAIN_MODE ) m", "############## Store to dict ############## to_store = { 'input': input_batch,", "] )) prefetch_threads.start() ############## Run First Batch ############## if not", "= target_tasks.split(\" \") ON_TEST_SET = True IN_TRAIN_MODE = False parser", "], cfg[ 'model_path' ] ) ############## Start dataloading workers ##############", "with open( pickle_dir, 'wb') as fp: pickle.dump(all_outputs, fp) try: subprocess.call(\"aws", "class_1000\" list_of_tasks = target_tasks.split(\" \") ON_TEST_SET = True IN_TRAIN_MODE =", "{ 'sess': tf.InteractiveSession(), 'coord': tf.train.Coordinator() } # task = '{f}__{t}__{hs}'.format(f=task_from,", "import lib.data.load_ops as load_ops tf.logging.set_verbosity(tf.logging.ERROR) all_outputs = {} pickle_dir =", "from multiprocessing import Pool import numpy as np import os", ") RuntimeDeterminedEnviromentVars.register_dict( cfg ) split_file = cfg['test_filenames'] if ON_TEST_SET else", "cfg['log_root'], task, 'model.permanent-ckpt' ) print( cfg['model_path']) if cfg['model_path'] is None:", "if not hasattr(m, 'masks'): ( input_batch, target_batch, data_idx, predicted, loss,", "# Disabe def blockPrint(): sys.stdout = open(os.devnull, 'w') # Restore", "= remove_dups(sys.path) print(\"FINISHED: 
{}\\n\\n\\n\\n\\n\\n\".format(task)) pickle_dir = 'viz_output_single_task.pkl' with open( pickle_dir,", "'config' ] except: pass sys.path = remove_dups(sys.path) print(\"FINISHED: {}\\n\\n\\n\\n\\n\\n\".format(task)) pickle_dir", "pickle_dir = 'viz_output_single_task.pkl' with open( pickle_dir, 'wb') as fp: pickle.dump(all_outputs,", "with open( pickle_dir, 'rb') as fp: all_outputs = pickle.load(fp) for", "m.targets, m.masks, model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) if task", "ego_motion fix_pose impainting_whole jigsaw keypoint2d keypoint3d non_fixated_pose point_match reshade rgb2depth", "if ON_TEST_SET else cfg['val_filenames'] cfg['train_filenames'] = split_file cfg['val_filenames'] = split_file", "os import pdb import pickle import subprocess import sys import", "( input_batch, target_batch, mask_batch, data_idx, predicted, loss, ) = training_runners['sess'].run(", "train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg ) RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time = time.time() #", "'segment25d': from sklearn.decomposition import PCA x = np.zeros((32,256,256,3), dtype='float') for", "determines whether to use train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg ) RuntimeDeterminedEnviromentVars.populate_registered_variables()", "PCA x = np.zeros((32,256,256,3), dtype='float') for i in range(predicted.shape[0]): embedding_flattened", "parser.add_argument('--hs', dest='hs', help='Hidden size to use', type=int) parser.add_argument('--n-parallel', dest='n_parallel', help='Number", "try: subprocess.call(\"aws s3 cp {} s3://task-preprocessing-512-oregon/visualizations/\".format(pickle_dir), shell=True) except: subprocess.call(\"sudo cp", "sys.stdout # Disabe def blockPrint(): sys.stdout = open(os.devnull, 'w') #", "= argparse.ArgumentParser(description='Viz Single Task') parser.add_argument('--idx', dest='idx', help='Task to run', type=int)", "CONFIG_DIR, 
nopause=True ) RuntimeDeterminedEnviromentVars.register_dict( cfg ) split_file = cfg['test_filenames'] if", "ON_TEST_SET = True IN_TRAIN_MODE = False parser = argparse.ArgumentParser(description='Viz Single", "seen_add(x))] pairs = list(itertools.product(list_of_tasks, list_of_tasks)) args = parser.parse_args() idx_to_run =", "or task == 'segment25d': from sklearn.decomposition import PCA x =", "np.zeros((32,256,256,3), dtype='float') for i in range(predicted.shape[0]): embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64)) pca", "First Batch ############## if not hasattr(m, 'masks'): ( input_batch, target_batch,", "], training_runners[ 'coord' ] )) prefetch_threads.start() ############## Run First Batch", "\") ON_TEST_SET = True IN_TRAIN_MODE = False parser = argparse.ArgumentParser(description='Viz", "task, 'model.permanent-ckpt' ) print( cfg['model_path']) if cfg['model_path'] is None: continue", "ipython_std_out = sys.stdout # Disabe def blockPrint(): sys.stdout = open(os.devnull,", "# is_training determines whether to use train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg", "absolute_import, division, print_function import argparse import importlib import itertools import", "time.time() # utils.print_start_info( cfg, inputs[ 'max_steps' ], is_training=False ) ##############", "edge3d ego_motion fix_pose impainting_whole jigsaw keypoint2d keypoint3d non_fixated_pose point_match reshade", "= pairs else: pairs_to_run = pairs[idx_to_run:idx_to_run+1] def run_to_task(task_to): import general_utils", "/ (lower_dim.max() - lower_dim.min()) x[i] = lower_dim predicted = x", "if task == 'jigsaw': continue cfg['model_path'] = os.path.join( cfg['log_root'], task,", "############## training_runners[ 'coord' ].request_stop() training_runners[ 'coord' ].join() # if os.path.isfile(pickle_dir):", ") prefetch_threads = threading.Thread( target=data_prefetch_init_fn, args=( training_runners[ 'sess' ], training_runners[", 
"vanishing_point_well_defined segmentsemantic_rb class_selected class_1000\" list_of_tasks = target_tasks.split(\" \") ON_TEST_SET =", "sklearn.decomposition import PCA x = np.zeros((32,256,256,3), dtype='float') for i in", "tf import tensorflow.contrib.slim as slim import threading import init_paths from", "= model[ 'model' ] model[ 'saver_op' ].restore( training_runners[ 'sess' ],", "target_batch, data_idx, predicted, loss, ) = training_runners['sess'].run( [ m.input_images, m.targets,", "lower_dim.min()) x[i] = lower_dim predicted = x ############## Clean Up", "{}\".format(task)) # os.system(\"sudo cp {d} /home/ubuntu/s3/model_log\".format(d=pickle_dir)) ############## Reset graph and", "is None: continue ############## Set Up Inputs ############## # tf.logging.set_verbosity(", "m = model[ 'model' ] model[ 'saver_op' ].restore( training_runners[ 'sess'", "point_match reshade rgb2depth rgb2mist rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined segmentsemantic_rb", "= pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim = (lower_dim - lower_dim.min()) / (lower_dim.max() -", "'coord' ] )) prefetch_threads.start() ############## Run First Batch ############## if", "exists....\\n\\n\\n\".format(task)) continue print(\"Doing {task}\".format(task=task)) general_utils = importlib.reload(general_utils) tf.reset_default_graph() training_runners =", "cfg['model_path']) if cfg['model_path'] is None: continue ############## Set Up Inputs", "mask_batch, 'data_idx':data_idx, 'output':predicted} all_outputs[task] = to_store print(\"Done: {}\".format(task)) # os.system(\"sudo", "for i in range(predicted.shape[0]): embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64)) pca = PCA(n_components=3)", "if os.path.isfile(pickle_dir): # with open(pickle_dir, 'rb') as fp: # all_outputs", "x[i] = lower_dim predicted = x ############## Clean Up ##############", "loss, ) = training_runners['sess'].run( [ m.input_images, m.targets, model[ 
'data_idxs' ],", "'rb') as fp: # all_outputs = pickle.load(fp) ############## Store to", "############## if not hasattr(m, 'masks'): ( input_batch, target_batch, data_idx, predicted,", "import absolute_import, division, print_function import argparse import importlib import itertools", "training_runners = { 'sess': tf.InteractiveSession(), 'coord': tf.train.Coordinator() } # task", "hs=args.hs) CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ############## Load Configs ############## cfg =", "is_training=ON_TEST_SET, use_filename_queue=False ) # is_training determines whether to use train/validaiton", "pairs else: pairs_to_run = pairs[idx_to_run:idx_to_run+1] def run_to_task(task_to): import general_utils from", "pairs_to_run = pairs[idx_to_run:idx_to_run+1] def run_to_task(task_to): import general_utils from general_utils import", "from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction import lib.data.load_ops as load_ops tf.logging.set_verbosity(tf.logging.ERROR) all_outputs", "paths ############## tf.reset_default_graph() training_runners['sess'].close() try: del sys.modules[ 'config' ] except:", "# Restore def enablePrint(): sys.stdout = ipython_std_out # Force Print", "'jigsaw': continue cfg['model_path'] = os.path.join( cfg['log_root'], task, 'model.permanent-ckpt' ) print(", "Pool import numpy as np import os import pdb import", "import models.architectures as architectures from data.load_ops import resize_rescale_image import utils", "[x for x in seq if not (x in seen", "inputs = utils.setup_input( cfg, is_training=ON_TEST_SET, use_filename_queue=False ) # is_training determines", "pickle_dir, 'rb') as fp: all_outputs = pickle.load(fp) for task in", "__future__ import absolute_import, division, print_function import argparse import importlib import", "inputs, cfg ) RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time = time.time() # 
utils.print_start_info( cfg,", "############## Reset graph and paths ############## tf.reset_default_graph() training_runners['sess'].close() try: del", "enablePrint() print(str) sys.stdout.flush() blockPrint() def remove_dups(seq): seen = set() seen_add", "split_file cfg['test_filenames'] = split_file cfg['num_epochs'] = 1 cfg['randomize'] = False", "= utils.setup_model( inputs, cfg, is_training=IN_TRAIN_MODE ) m = model[ 'model'", "dest='hs', help='Hidden size to use', type=int) parser.add_argument('--n-parallel', dest='n_parallel', help='Number of", "= lower_dim predicted = x ############## Clean Up ############## training_runners[" ]
[ "-e %s ]] || cd $(GENDIR)\" % proto_src_loc) gendir_include =", "Analogous to cc_library include_prefix argument. strip_include_prefix: Analogous to cc_library strip_include_prefix", "protoc_srcs_set, outs = pb_outs, tools = tools, cmd = \"", "= None, data = None, testonly = None, textual_hdrs =", "\"lib/stratum\", symlinks = \"dereference\", )], default = [], ) native.Fileset(", "host builds. ppc: The result of the alias for ppc", "different ppc toolchain for Stratum. # This means that we", "= ALL_ARCHES defs_plus = (defines or []) + _ARCH_DEFINES cc_binary(", "steps. # This is more general than it may seem:", "This means that we must provide portable shared libs for", "for host builds. ppc: The result of the alias for", "= [HOST_ARCH] ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES # Identify Stratum", "to avoid generating # warnings. accum_flags.append( \"$$(if [[ -e $(GENDIR)/%s", "to build multi-arch library from Message protobuf(s). For library \"name\",", "else None, x86 = decorate(name, \"x86\") if \"x86\" in arches", "\"@com_google_protobuf//:protobuf\" _SC_GRPC_PLUGIN = \"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\" _GRPC_PLUGIN = \"//grpc:grpc_cpp_plugin\" def _loc(target): \"\"\"Return", "Standard blaze testonly parameter. proto_include: Path to add to include", "an sc_proto_lib with python support.\") _gen_py_proto_lib( name = name, srcs", "gen_grpc_pb_h = gen_stem + \".grpc.pb.h\" gen_grpc_pb_cc = gen_stem + \".grpc.pb.cc\"", "step, because our rollup command # might be generated on", "data filesets, mapped to bin/ and share/ respectively. * ${name}_${arch}_tarball", "% _loc(protoc_label) grpc_plugin = \"$${g3}/%s\" % _loc(grpc_plugin) cpp_out = \"$${g3}/$(GENDIR)/%s/%s\"", "that depend on this rule. Typically \".\" python_support: Defaults to", "as data at runtime (host builds only). testonly: Standard blaze", "argument. 
\"\"\" cc_test( name = name, size = size or", "copts = [], includes = includes, testonly = testonly, textual_hdrs", "None, linkopts = None, visibility = None): \"\"\"Creates a cc_test", "arches: arches = ALL_ARCHES defs_plus = (defines or []) +", "% \"X86\", include_fmt % \"x86\", \"#elif defined(STRATUM_ARCH_%s)\" % \"HOST\", include_fmt", "For library \"name\", generates: * ${name}_shim aka .pb.h master switch", "system for Stratum P4 switch stack. To use this, load()", "with a user defined configuration fragment would be a much", "this package and all dependency packages. * ${name}_${arch} fileset containing", "Add any platform specific files to the final tarball. platform_entries", "= visibility, testonly = testonly, proto_include = proto_include, grpc_shim_rule =", "Generates a cc_test rule that doesn't break the build when", "hdrs + protoc_deps + protobuf_srcs + [my_proto_rollup]) gen_srcs = []", "contexts to alter a blaze rule based on the target", "= [\"STRATUM_ARCH_PPC\"], x86 = [\"STRATUM_ARCH_X86\"], ) STRATUM_INTERNAL = [ \"//stratum:__subpackages__\",", "parse_label(src) if not filename.endswith(\".proto\"): continue hdr_stem = filename[0:-6] new_hdr_name =", "target: Blaze target name available to this build. Returns: $(location", "then calls sc_cc_lib with same name for each arch; #", "of the binaries and all of the data needed for", "The result of the alias for ppc builds. x86: The", "EMBEDDED_X86 = \"x86\" EMBEDDED_ARCHES = [ EMBEDDED_PPC, EMBEDDED_X86, ] HOST_ARCH", "enable {\"grpc\", \"rpc\"}; Only \"grpc\" is supported. So \"rpc\" and", "[gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule( name = src_arch + \".grpc.pb\", srcs =", "to False. 
If True, generate a python proto library from", "else: level += 1 result.append(d) return sep.join(result) # Adds a", "difference: you can supply lists of architectures for which they", "| xdeps, [], arches, ), data = sc_platform_filter(data, [], arches),", "python protos work with sc_proto_lib's proto_include field, so we keep", "] proto_rollup_cmds = [\"printf '%%s\\n' %s\" % flag for flag", "arch. Generates a blaze alias that will select the appropriate", "deps, [\"//stratum/portage:dummy_with_main\"], arches, ), srcs = sc_platform_filter(srcs, [], arches), copts", "the alias target. host: The result of the alias for", "_TRACE_SRCS to show sources in embedded sc_cc_lib compile steps. #", "includes = [] if proto_include: includes = [proto_include] # Note:", "lines for shim switch file. # Lines expand inside squotes,", "${src}.proto, generate: :${src}_${arch}.pb rule to run protoc ${src}.proto => ${src}.${arch}.pb.{h,cc}", "decorate(name, \"default_pb\") py_name = decorate(name, \"py\") proto_library( name = regular_proto_name,", "= None, x86 = None, default = None): \"\"\"Public macro", "src_stem) gen_stem = \"%s.%s\" % (src_stem, arch) # We can't", "to keep them different and allow all to be generated", "Library. sc_cc_bin Declare a portable Binary. sc_package Declare a portable", "* ${name}_${arch}_bin and ${name}_${arch}_data filesets containing respectively all of the", "any of {host,ppc,x86} that isn't specified. Returns: The requested selector.", "in srcs: pkg, filename = parse_label(src) if not filename.endswith(\".proto\"): continue", "alias. Returns: Name of shim rule for use in follow-on", "passed through to all filesets. \"\"\" bins = depset(bins or", "alias that will select the appropriate target. If no selection", "to use for host builds. ppc: The value to use", "something along the lines of augmenting context with a user", "= \"{extension_name}\", ) def sc_package( name = None, bins =", "List of other sc_packages to add to this package. 
arches:", "equivalent. \"\"\" if not arches: if testonly: arches = HOST_ARCHES", "argument. size: Analogous to cc_test size argument. srcs: Analogous to", "rules. testonly: Standard blaze testonly parameter. proto_include: Path to add", "def sc_platform_filter(value, default, arches): return sc_platform_select( host = value if", "proto_rollup_flags + [ \"-I%s\" % protobuf_include, \"--cpp_out=%s\" % cpp_out, proto_src_loc,", "generating # warnings. accum_flags.append( \"$$(if [[ -e $(GENDIR)/%s ]]; then", "load( \"//devtools/build_cleaner/skylark:build_defs.bzl\", \"register_extension_info\", ) load(\"@rules_proto//proto:defs.bzl\", \"proto_library\") load(\"@rules_cc//cc:defs.bzl\", \"cc_binary\", \"cc_library\", \"cc_test\")", "name = name, deps = sc_platform_filter( deps, [\"//stratum/portage:dummy_with_main\"], arches, ),", "tools = tools, cmd = \" && \".join(cmds), heuristic_label_expansion =", "new_hdr_loc = \"$(location %s)\" % new_hdr_name cmds.append(\"{ %s; } >", "outs = [\"%s.tar.gz\" % name] # Copy our files into", "of {host,ppc,x86} that isn't specified. visibility: The visibility of the", "this for program-sizing build \"-g\", # Don't use this for", "available to this build. Returns: $(location target) \"\"\" return \"$(location", "1:] else: print(\"Invalid proto include '%s' doesn't match src %s\"", "% \"x86\", \"#elif defined(STRATUM_ARCH_%s)\" % \"HOST\", include_fmt % \"host\", \"#else\",", "\"name\", generates: * ${name}_shim aka .pb.h master switch - see", "in bins.to_list()]), (\"data\", data), ]: native.Fileset( name = decorate(fileset_name, extension),", "provide portable shared libs for our ppc # executables. ppc", "for x86 builds. default: The value to use for any", "= defs_plus, includes = includes, linkopts = [\"-ldl\", \"-lutil\"], testonly", "can be used in most contexts to alter a blaze", "# Build options for all embedded architectures # # Set", "a `src'. 
# TODO(unknown): if useful again then inject from", "generates an alias that will select the appropriate proto target", "for depending on normal proto_library rules. def sc_proto_lib( name =", "path.${arch}.pb.h Also generates an alias that will select the appropriate", ".pb.h shims and other portability hacks. _ARCH_DEFINES = sc_platform_select( default", "level += 1 result.append(d) return sep.join(result) # Adds a suffix", "are like cc_library(), proto_library(), and cc_binary(), but with different options", "default), config_label_prefix + \"ppc\": (ppc or default), config_label_prefix + \"x86\":", "os is not allowed in build defs. For example ../../dir/to/deeply/nested/path/../../../other/path", "Binary. sc_package Declare a portable tarball package. and the variables/lists:", "+ \"sc_platform_select. Please add.\") config_label_prefix = \"//stratum:stratum_\" return select({ \"//conditions:default\":", "\"'\" + s + \"'\" # Emulate Python 2.5+ str(startswith([prefix", "None, arches = None, copts = None, defines = None,", "known arches. EMBEDDED_ARCHES All embedded arches. EMBEDDED_PPC Name of PowerPC", "\"grpc\" is supported. So \"rpc\" and \"grpc\" are equivalent. \"\"\"", "and other portability hacks. _ARCH_DEFINES = sc_platform_select( default = [\"STRATUM_ARCH_HOST\"],", "arches = HOST_ARCHES else: arches = ALL_ARCHES service_enable = {", "includes, testonly = testonly, textual_hdrs = grpc_gen_hdrs_plus, visibility = visibility,", "textual_plus | xdeps, [], arches, ), data = sc_platform_filter(data, [],", "% temp_prefix, ] + proto_path_cmds + [ \" \".join([protoc] +", "None, python_support = False, services = []): \"\"\"Public macro to", "compiler invocations. _EMBEDDED_CFLAGS = [ \"-I$(GENDIR)\", ] # Used for", "selector. \"\"\" if default == None and (host == None", "============================================== EMBEDDED_PPC = \"ppc\" EMBEDDED_X86 = \"x86\" EMBEDDED_ARCHES = [", "[]) includes = depset(includes or []) data = depset(data or", "warnings. 
\"-no-canonical-prefixes\", # Don't mangle paths and confuse blaze. \"-fno-builtin-malloc\",", "name: Analogous to cc_library name argument. deps: Analogous to cc_library", "= \"sc_cc_lib\", label_regex_for_dep = \"{extension_name}\", ) def sc_cc_bin( name, deps", "the appropriate proto target based on the currently selected platform", "= 0 deps = depset(deps or []) srcs = depset(srcs", "Emulate Python 2.5+ str(startswith([prefix ...]) def starts_with(s, prefix_list): for prefix", "= \"$${g3}/protobuf/src\" if arch in EMBEDDED_ARCHES: grpc_plugin = _SC_GRPC_PLUGIN else:", "\"%s/%s\" % (cpp_out, native.package_name()[len(full_proto_include):]) # We do a bit of", "[], arches = [], visibility = None, testonly = None,", "to run protoc w/ erpc plugin: ${src}.proto => ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup", "if proto_include == \".\": full_proto_include = native.package_name() elif proto_include: full_proto_include", "testonly: Standard blaze testonly argument. \"\"\" regular_proto_name = decorate(name, \"default_pb\")", ") register_extension_info( extension_name = \"sc_cc_lib\", label_regex_for_dep = \"{extension_name}\", ) def", "+ s + '\"' # Adds squotes around a string.", "-e %s ]]; then echo -IG3LOC/%s; fi)\" % (full_proto_include, full_proto_include),", "outs, cmd = \" && \".join(cmds) or \"true\", ) sc_platform_alias(", "or []: if service == \"grpc\": service_enable[\"grpc\"] = 1 elif", "Blaze target name available to this build. Returns: $(location target)", "visibility: Standard blaze visibility parameter. \"\"\" deps = depset(deps or", "testonly argument. \"\"\" regular_proto_name = decorate(name, \"default_pb\") py_name = decorate(name,", "% _loc(grpc_plugin) cpp_out = \"$${g3}/$(GENDIR)/%s/%s\" % (native.package_name(), arch) accum_flags =", "False. If True, generate a python proto library from this", "Declare a portable tarball package. and the variables/lists: ALL_ARCHES All", "with Stratum builds. 
Generates a cc_test rule that doesn't break", "for Stratum. # This means that we must provide portable", "libraries and binaries are generated for every listed architecture. The", "- see _gen_proto_shims, above. * ${name}_${arch}_pb protobuf compile rules -", "= None, srcs = None, hdrs = None, arches =", "this library for, None => EMBEDDED_ARCHES (HOST_ARCHES not generally supported).", "for arch in arches: _gen_proto_lib( name = name, srcs =", "= decorate(name, \"fs\") for extension, inputs in [ (\"bin\", [\"%s.stripped\"", "if not s.endswith(\".h\")]: alwayslink = 1 if not arches: arches", "2018 Google LLC # Copyright 2018-present Open Networking Foundation #", "will stay invalid. \"\"\" sep = \"/\" level = 0", "= hdr_stem + hdr_ext outs.append(new_hdr_name) # Generate lines for shim", "portable Binary. sc_package Declare a portable tarball package. and the", "plugin: ${src}.proto => ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects include options for protoc:", "List of .proto files - private to this library. hdrs:", "name = decorate(name[:-6], \"grpc_proto\"), pb_modifier = \".grpc.pb\", srcs = srcs", "the input path with minimal use of path-up segments. 
Invalid", "deps = None, srcs = None, arches = None, copts", "This aspect of the system is suboptimal - something along", "%s\" % flag for flag in accum_flags] proto_rollup_cmds.append(\"cat $(SRCS)\") proto_rollup_cmd", "arches = EMBEDDED_ARCHES fileset_name = decorate(name, \"fs\") for extension, inputs", "\"dereference\", )], default = [], ) native.Fileset( name = fileset_name,", "visibility, ) def _gen_proto_shims(name, pb_modifier, srcs, arches, visibility): \"\"\"Macro to", "= [ \"#if defined(STRATUM_ARCH_%s)\" % \"PPC\", include_fmt % \"ppc\", \"#elif", "include_prefix = None, strip_include_prefix = None, data = None, testonly", "\"cd $${g3}\", \"cp %s.grpc.pb.h %s\" % (temp_stem, _loc(gen_grpc_pb_h)), \"cp %s.grpc.pb.cc", "logic to select path.${arch}.pb.h Also generates an alias that will", "name = src_arch + \".grpc.pb\", srcs = protoc_srcs_set, outs =", "Providing own implementation because import os is not allowed in", "\"#endif\", ] gen_cmds = [(\"printf '%%s\\\\n' '%s'\" % line) for", "if proto_include: includes = [proto_include] # Note: Public sc_proto_lib invokes", "visibility of the generated alias. Returns: Name of shim rule", "from cmdline else kill feature. _TRACE_SRCS = False # Used", "and all of the data needed for this package and", "result of the alias for ppc builds. x86: The result", "= decorate(name_arch, \"headers\"), srcs = hdrs + protoc_deps, visibility =", "by protoc, as well as the include paths used for", "linkopts, visibility = visibility, ) register_extension_info( extension_name = \"sc_cc_test\", label_regex_for_dep", "cc_test deps argument. data: Analogous to cc_test data argument. defines:", "be generated on another forge server. proto_path_cmds = [\"rollup=$$(sed \\\"s,G3LOC,$${PWD},g\\\"", "srcs: if [s for s in srcs.to_list() if not s.endswith(\".h\")]:", "sc_proto_lib. For each src path.proto, generates path.pb.h consisting of: #ifdef", "library. 
For every given ${src}.proto, generate: :${src}_${arch}.pb rule to run", "grpc shim for this proto lib. \"\"\" bash_vars = [\"g3=$${PWD}\"]", "label, expanding implicit targets if needed. def decorate(label, suffix): if", "| [name] | _SC_GRPC_DEPS grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs sc_cc_lib(", "Args: name: The name of the alias target. host: The", "[regular_proto_name], visibility = visibility, testonly = testonly, ) # TODO(unknown):", "means that we must provide portable shared libs for our", "outs = [my_proto_rollup], cmd = proto_rollup_cmd, visibility = visibility, testonly", "textual_hdrs = grpc_gen_hdrs_plus, visibility = visibility, ) def _gen_proto_shims(name, pb_modifier,", "build this library for, None => ALL. visibility: Standard blaze", "can't use $${PWD} until this step, because our rollup command", "Analogous to cc_library name argument. deps: Analogous to cc_library deps", "label: # .../bar:bat -> .../bar:bat_suffix return \"%s_%s\" % (label, suffix)", "\"cp %s.pb.h %s\" % (temp_stem, _loc(gen_pb_h)), \"cp %s.pb.cc %s\" %", "such calls are OK as long as the arches are", "have hdrs or deps # attributes, so all embedded dependencies", "generates: * ${name}_shim aka .pb.h master switch - see _gen_proto_shims,", "arches else None, ppc = decorate(name, \"ppc\") if \"ppc\" in", "# Add any platform specific files to the final tarball.", "= src[0:-6] src_arch = \"%s_%s\" % (src_stem, arch) temp_stem =", "testonly, ) register_extension_info( extension_name = \"sc_proto_lib\", label_regex_for_dep = \"{extension_name}\", )", "\"\"\"Creates rules for the given portable library and arches. Args:", "variables/lists: ALL_ARCHES All known arches. EMBEDDED_ARCHES All embedded arches. 
EMBEDDED_PPC", "[\"//stratum/portage:dummy_with_main\"], arches, ), srcs = sc_platform_filter(srcs, [], arches), copts =", "native.FilesetEntry( srcdir = decorate(name, \"bin\"), destdir = \"bin\", ), native.FilesetEntry(", "containing respectively all of the binaries and all of the", "Analogous to cc_binary name argument. deps: Analogous to cc_binary deps", "= None): \"\"\"Public macro to alter blaze rules based on", "name for this package. bins: List of sc_cc_bin rules to", "[\"-I$(GENDIR)\", \"-I.\"] # Generate messages gen_pb_h = gen_stem + \".pb.h\"", "= (defines or []) + _ARCH_DEFINES textual_plus = textual_hdrs |", "the system is suboptimal - something along the lines of", "During embedded builds this target will generate a dummy binary", "this step, because our rollup command # might be generated", "data), ]: native.Fileset( name = decorate(fileset_name, extension), out = decorate(name,", "a dummy default target is used instead. Args: name: The", "build #-- \"-Os\", # Use this for program-sizing build \"-g\",", "return label.replace(\"//\", \"google3/\").replace(\":\", \"/\") elif label.startswith(\":\"): # :bat/baz -> bat/baz", "as input. * ${name}_py a py_proto_library version of this library.", "dquote(pkg + \"/\" + hdr_stem + \".%s\" + hdr_ext) lines", "includes = sc_platform_filter(includes, [], arches), include_prefix = include_prefix, strip_include_prefix =", "def sc_cc_lib( name, deps = None, srcs = None, hdrs", "None, x86 = None, default = None, visibility = None):", "Stratum builds. 
Generates a cc_test rule that doesn't break the", "% _loc(fileset_name), \"if [[ -e $${TEMP_DIR}/tarball/bin ]]\", \"then for f", "echo -IG3LOC/$(GENDIR)/%s; fi)\" % (full_proto_include, full_proto_include), ) accum_flags.append( \"$$(if [[", "if proto_include: fail(\"Cannot use proto_include on an sc_proto_lib with python", "+ _ARCH_DEFINES textual_plus = textual_hdrs | depset(deps.to_list()) cc_library( name =", "ppc, x86 = x86, ), visibility = visibility, ) #", "= \"//stratum:stratum_\" return select({ \"//conditions:default\": (host or default), config_label_prefix +", "\"//grpc:grpc_cpp_plugin\" def _loc(target): \"\"\"Return target location for constructing commands. Args:", "None, deps = None, data = None, defines = None,", "arch for .pb.h shims and other portability hacks. _ARCH_DEFINES =", "| sort -u -o $(@)\" % \"; \".join(proto_rollup_cmds) native.genrule( name", "Standard blaze visibility parameter, passed through to all filesets. \"\"\"", "for linking binaries. _EMBEDDED_LDFLAGS = [ # \"-static\", # Use", "None if proto_include == \".\": full_proto_include = native.package_name() elif proto_include:", "the proto_include field in this rule. services: List of services", "or default), }) # Generates an sc_platform_select based on a", "testonly = None, proto_include = None, python_support = False, services", "names are decorated to keep them different and allow all", "in prefix_list: if s.startswith(prefix): return prefix return None def sc_platform_select(host", "select(...) statement that can be used in most contexts to", "(full_proto_include, full_proto_include), ) else: temp_prefix = \"%s/%s\" % (cpp_out, native.package_name())", "gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds = bash_vars + [ \"mkdir -p %s\"", "in this rule. services: List of services to enable {\"grpc\",", "\"rpc\"}; Only \"grpc\" is supported. 
So \"rpc\" and \"grpc\" are", "services = []): \"\"\"Public macro to build multi-arch library from", "default = [], ) native.Fileset( name = fileset_name, out =", "prefix_list): for prefix in prefix_list: if s.startswith(prefix): return prefix return", "= \"@com_google_protobuf//:protobuf:protoc\" _PROTOBUF = \"@com_google_protobuf//:protobuf\" _SC_GRPC_PLUGIN = \"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\" _GRPC_PLUGIN =", "= [\"printf '%%s\\n' %s\" % flag for flag in accum_flags]", "+ hdr_ext) lines = [ \"#if defined(STRATUM_ARCH_%s)\" % \"PPC\", include_fmt", "= [] for dep in deps: if dep.endswith(\"_proto\"): protoc_deps.append(\"%s_%s_headers\" %", "run protoc ${src}.proto => ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule to run protoc", "linkopts: Analogous to cc_test linkopts argument. visibility: Analogous to cc_test", "srcs argument. arches: List of architectures to generate this way.", "dep in deps], visibility = visibility, testonly = testonly, )", "depset(deps or []) shim_rule = _gen_proto_shims( name = name, pb_modifier", "= [ native.FilesetEntry( srcdir = decorate(name, \"bin\"), destdir = \"bin\",", "in srcs + hdrs: if src.endswith(\".proto\"): src_stem = src[0:-6] src_arch", "return label[1:] else: # bat/baz -> bat/baz return label #", "# \"-static\", # Use this for program-sizing build # \"-Wl,--gc-sections,--no-wchar-size-warning\",", "all to be generated and addressed independently. This aspect of", "to using the proto_include mechanism protoc_label = _PROTOC protobuf_label =", "# Generic path & label helpers. ============================================ def _normpath(path): \"\"\"Normalize", "platform. Args: host: The value to use for host builds.", "= name, host = decorate(name, \"host\") if \"host\" in arches", "of the system is suboptimal - something along the lines", "${name}_${arch}_pb protobuf compile rules - one for each arch. 
*", "= None, arches = None, visibility = None): \"\"\"Public macro", "multiple such calls are OK as long as the arches", "= \" && \".join(cmds) or \"true\", ) sc_platform_alias( name =", "\"-fno-builtin-malloc\", # We'll use tcmalloc \"-fno-builtin-calloc\", \"-fno-builtin-realloc\", \"-fno-builtin-free\", \"-D__STDC_FORMAT_MACROS=1\", #", ") for src in srcs + hdrs: if src.endswith(\".proto\"): src_stem", "% target def _gen_proto_lib( name, srcs, hdrs, deps, arch, visibility,", "a python proto library from this rule. Any sc_proto_lib with", "visibility: Standard blaze visibility argument. testonly: Standard blaze testonly argument.", "to add as \"-I\" compilation options. testonly: Standard blaze testonly", "arch; # multiple such calls are OK as long as", "srcs: pkg, filename = parse_label(src) if not filename.endswith(\".proto\"): continue hdr_stem", "sc_package Declare a portable tarball package. and the variables/lists: ALL_ARCHES", "EMBEDDED_ARCHES fileset_name = decorate(name, \"fs\") for extension, inputs in [", "-rf $${TEMP_DIR}\", ] native.genrule( name = decorate(name, \"tarball\"), srcs =", "proto_rollup_cmd, visibility = visibility, testonly = testonly, ) for src", "for C and C++ compiler invocations. _EMBEDDED_CFLAGS = [ \"-I$(GENDIR)\",", "native.genrule( name = src_arch + \".grpc.pb\", srcs = protoc_srcs_set, outs", "Analogous to cc_test data argument. defines: Analogous to cc_test defines", "None): \"\"\"Public macro to package binaries and data for deployment.", "= None): \"\"\"Public macro to package binaries and data for", "host, ppc = ppc, x86 = x86, ), visibility =", "# Don't mangle paths and confuse blaze. \"-fno-builtin-malloc\", # We'll", "= [ decorate(decorate(dep, arch), \"proto_rollup.flags\") for dep in deps if", "\"%s.%s\" % (src_stem, arch) # We can't use $${PWD} until", "# Used for C and C++ compiler invocations. _EMBEDDED_CFLAGS =", "and sc_proto_lib rules that depend on this rule. Typically \".\"", "build defs. 
For example ../../dir/to/deeply/nested/path/../../../other/path will become ../../dir/to/other/path Args: path:", "None, copts = None, linkopts = None, visibility = None):", "listed architecture. The names are decorated to keep them different", "# Allow C++11 features _and_ GNU extensions. ] # Used", "[\"$${rollup}\"] if proto_include: # We'll be cd-ing to another directory", "= None, ppc = None, x86 = None, default =", "build. Returns: $(location target) \"\"\" return \"$(location %s)\" % target", "src %s\" % (full_proto_include, proto_src_loc)) # By cd-ing to another", "to accumulate the set of .proto files needed to #", "elif label.startswith(\"//\"): # //foo/bar -> //foo/bar:bar_suffix return \"%s:%s_%s\" % (label,", "visibility = visibility, ) grpc_shim_rule = None if (service_enable[\"grpc\"]): grpc_shim_rule", "Base name for this package. bins: List of sc_cc_bin rules", "shim switch file. # Lines expand inside squotes, so quote", "proto_src_loc = \"%s/%s\" % (native.package_name(), src) proto_path_cmds.append(\"[[ -e %s ]]", "\"\"\" if not arches: if testonly: arches = HOST_ARCHES else:", "cc_binary(), but with different options and some restrictions. The key", "the alias for ppc builds. x86: The result of the", "# Generate lines for shim switch file. # Lines expand", "\"host\" in arches else None, ppc = decorate(name, \"ppc\") if", "service_enable = { \"grpc\": 0, } for service in services", "%s\" % (temp_stem, _loc(gen_grpc_pb_cc)), ] grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule(", "use this for program-sizing build \"-Wall\", \"-Werror\", # Warn lots,", "arches = [], visibility = None, testonly = None, proto_include", "= [native.FilesetEntry( srcdir = \"%s:BUILD\" % _PPC_GRTE, files = [\":libs\"],", "needed. The public symbols are the macros: decorate(path) sc_cc_lib Declare", "All host arches. STRATUM_INTERNAL For declaring Stratum internal visibility. The", "support. visibility: The blaze visibility of the generated alias. 
Returns:", "use proto_include on an sc_proto_lib with python support.\") _gen_py_proto_lib( name", "arch), deps = dep_set, srcs = gen_srcs, hdrs = hdrs", "+ proto_path_cmds + [ \" \".join([protoc] + gendir_include + proto_rollup_flags", "visibility = visibility, testonly = testonly, ) for src in", "= srcs + hdrs, arches = arches, visibility = visibility,", "protoc_deps, visibility = visibility, ) my_proto_rollup = decorate(name_arch, \"proto_rollup.flags\") protoc_srcs_set", "\"TEMP_DIR=$(@D)/stratum_packaging_temp\", \"mkdir $${TEMP_DIR}\", \"cp -r %s $${TEMP_DIR}/tarball\" % _loc(fileset_name), \"if", "copts: Analogous to cc_test copts argument. linkopts: Analogous to cc_test", "The blaze visibility of the generated alias. Returns: Name of", "statement that can be used in most contexts to alter", "cd $(GENDIR)/%s\" % full_proto_include, \"fi\", ])) gendir_include = [\"-I$${g3}/$(GENDIR)\", \"-I$${g3}\",", "Adds dquotes around a string. def dquote(s): return '\"' +", "for generated sc_cc_libs. grpc_shim_rule: If needed, the name of the", "to all filesets. \"\"\" bins = depset(bins or []) data", "protoc: ${src}_${arch}_proto_rollup.flags Feed each set into sc_cc_lib to wrap them", "host arches. STRATUM_INTERNAL For declaring Stratum internal visibility. The macros", "interacts safely with Stratum builds. Generates a cc_test rule that", "that can be used in most contexts to alter a", "| _SC_GRPC_DEPS grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs sc_cc_lib( name =", "to build .pb.h multi-arch master switch for sc_proto_lib. For each", "in [ (\"bin\", [\"%s.stripped\" % b for b in bins.to_list()]),", "kill feature. _TRACE_SRCS = False # Used for all gcc", "decorations assumed, used and exported as header, not for flags,", "builds. 
Generates a cc_test rule that doesn't break the build", "$${g3}\", \"cp %s.grpc.pb.h %s\" % (temp_stem, _loc(gen_grpc_pb_h)), \"cp %s.grpc.pb.cc %s\"", "= \"#include \" + dquote(pkg + \"/\" + hdr_stem +", "data or [], defines = defines, copts = copts, linkopts", "work with sc_proto_lib's proto_include field, so we keep this simple.", "\"name\", generates: * ${name}_${arch}_bin and ${name}_${arch}_data filesets containing respectively all", "# bar -> bar_suffix return \"%s_%s\" % (label, suffix) #", "this for program-sizing build # \"-Wl,--gc-sections,--no-wchar-size-warning\", # Use this for", "dep_set, srcs = gen_srcs, hdrs = hdrs + gen_hdrs, arches", "+ [ \"mkdir -p %s\" % temp_prefix, ] + proto_path_cmds", "but also exported for dependent rules to utilize. deps: List", "proto_rollup_cmds.append(\"cat $(SRCS)\") proto_rollup_cmd = \"{ %s; } | sort -u", "EMBEDDED_PPC, EMBEDDED_X86, ] HOST_ARCH = \"host\" HOST_ARCHES = [HOST_ARCH] ALL_ARCHES", "another directory before protoc, so # adjust our .proto path", "= [\"STRATUM_ARCH_X86\"], ) STRATUM_INTERNAL = [ \"//stratum:__subpackages__\", ] # #", "argument. srcs: Analogous to cc_library srcs argument. hdrs: Analogous to", "along the lines of augmenting context with a user defined", "includes: Paths to add as \"-I\" compilation options. testonly: Standard", "defines = defs_plus, includes = includes, linkopts = [\"-ldl\", \"-lutil\"],", "macros are like cc_library(), proto_library(), and cc_binary(), but with different", "are the macros: decorate(path) sc_cc_lib Declare a portable Library. sc_proto_lib", "proto_include == \".\": full_proto_include = native.package_name() elif proto_include: full_proto_include =", "name_arch = decorate(name, arch) # We use this filegroup to", "protoc_deps.append(\"%s_%s_headers\" % (dep, arch)) name_arch = decorate(name, arch) # We", "Stratum P4 switch stack. 
To use this, load() this file", "xdeps, [], arches, ), data = sc_platform_filter(data, [], arches), visibility", "), data = sc_platform_filter(data, [], arches), visibility = visibility, )", "= visibility, ) return shim_rule def _gen_py_proto_lib(name, srcs, deps, visibility,", "package and all dependency packages. * ${name}_${arch} fileset containing the", "_SC_GRPC_DEPS = [ \"//sandblaze/prebuilt/grpc\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_base\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\", ] _PROTOC = \"@com_google_protobuf//:protobuf:protoc\"", "argument. deps: Analogous to cc_test deps argument. data: Analogous to", "because import os is not allowed in build defs. For", "switch - see _gen_proto_shims, above. * ${name}_${arch}_pb protobuf compile rules", "do mv $${f} $${f%.stripped}\", # rename not available. \"done\", \"fi\",", "= copts, defines = defs_plus, includes = includes, linkopts =", "src lists. \"\"\" outs = [] cmds = [] hdr_ext", "+ \"'\" # Emulate Python 2.5+ str(startswith([prefix ...]) def starts_with(s,", "bash_vars = [\"g3=$${PWD}\"] # TODO(unknown): Switch protobuf to using the", "% _PPC_GRTE, files = [\":libs\"], destdir = \"lib/stratum\", symlinks =", "necessary changes # before tarballing. cmds = [ \"TEMP_DIR=$(@D)/stratum_packaging_temp\", \"mkdir", "(service_enable[\"grpc\"]): grpc_shim_rule = _gen_proto_shims( name = decorate(name[:-6], \"grpc_proto\"), pb_modifier =", "to cc_test linkopts argument. 
visibility: Analogous to cc_test visibility argument.", "genrule doesn't have hdrs or deps # attributes, so all", "this for program-sizing build \"-Wall\", \"-Werror\", # Warn lots, and", "List of proto files hdrs: More files to build into", "$(SRCS)\") proto_rollup_cmd = \"{ %s; } | sort -u -o", "% _loc(my_proto_rollup)] proto_rollup_flags = [\"$${rollup}\"] if proto_include: # We'll be", "includes = None, testonly = None, visibility = None): \"\"\"Creates", "py_name, api_version = 2, deps = [regular_proto_name], visibility = visibility,", "include path. This will affect the symbols generated by protoc,", "directories. Providing own implementation because import os is not allowed", "name, srcs = depset(srcs + hdrs), deps = deps, visibility", "EMBEDDED_ARCHES + HOST_ARCHES # Identify Stratum platform arch for .pb.h", "per (listed) arch; # which then calls sc_cc_lib with same", "generates: * ${name}_${arch}_bin and ${name}_${arch}_data filesets containing respectively all of", "new_hdr_name cmds.append(\"{ %s; } > %s\" % (\" && \".join(gen_cmds),", "= shim_rule, srcs = srcs, outs = outs, cmd =", "provided for a given platform, {default} is used instead. A", "), srcs = sc_platform_filter(srcs, [], arches), copts = copts, defines", "include_fmt % \"host\", \"#else\", \"#error Unknown STRATUM_ARCH\", \"#endif\", ] gen_cmds", "= 1 elif service == \"rpc\": service_enable[\"grpc\"] = 1 else:", "compiled - defaults to all if left unstated. Internally, libraries", "name: Base name for this library. srcs: List of .proto", "pb_modifier = \".pb\", srcs = srcs + hdrs, arches =", "\"$${g3}/%s\" % _loc(grpc_plugin) cpp_out = \"$${g3}/$(GENDIR)/%s/%s\" % (native.package_name(), arch) accum_flags", "[ \"//sandblaze/prebuilt/grpc\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_base\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\", ] _PROTOC = \"@com_google_protobuf//:protobuf:protoc\" _PROTOBUF =", "\"grpc\" are equivalent. 
\"\"\" if not arches: if testonly: arches", "build # \"-Wl,--gc-sections,--no-wchar-size-warning\", # Use this for program-sizing build ]", "cmd = \" && \".join(cmds), heuristic_label_expansion = 0, visibility =", "%s.grpc.pb.h %s\" % (temp_stem, _loc(gen_grpc_pb_h)), \"cp %s.grpc.pb.cc %s\" % (temp_stem,", "deps: Analogous to cc_test deps argument. data: Analogous to cc_test", "we can use $(CC_FLAGS) instead of this. \"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\", ] #", "Use this for program-sizing build \"-g\", # Don't use this", "and \"grpc\" are equivalent. \"\"\" if not arches: if testonly:", "decorate(name, \"shims\") native.genrule( name = shim_rule, srcs = srcs, outs", "builds only). testonly: Standard blaze testonly parameter. textual_hdrs: Analogous to", "lines of augmenting context with a user defined configuration fragment", "Currently supported architectures: ppc x86 \"\"\" load(\"//tools/build_defs/label:def.bzl\", \"parse_label\") load( \"//devtools/build_cleaner/skylark:build_defs.bzl\",", "= decorate(name, \"bin\"), destdir = \"bin\", ), native.FilesetEntry( srcdir =", "\"rm -rf $${TEMP_DIR}\", ] native.genrule( name = decorate(name, \"tarball\"), srcs", "# Lines expand inside squotes, so quote accordingly. include_fmt =", "destdir = \"lib/stratum\", symlinks = \"dereference\", )], default = [],", "Standard blaze testonly argument. \"\"\" regular_proto_name = decorate(name, \"default_pb\") py_name", "fileset_name, out = name, entries = [ native.FilesetEntry( srcdir =", "\".grpc.pb\", srcs = protoc_srcs_set, outs = grpc_pb_outs, tools = grpc_tools,", "argument. testonly: Standard blaze testonly argument. \"\"\" regular_proto_name = decorate(name,", "srcs = protoc_srcs_set, outs = grpc_pb_outs, tools = grpc_tools, cmd", "# \"-Wl,--gc-sections,--no-wchar-size-warning\", # Use this for program-sizing build ] #", "proto_include: Include path for generated sc_cc_libs. 
grpc_shim_rule: If needed, the", "includes, linkopts = [\"-ldl\", \"-lutil\"], testonly = testonly, visibility =", "use in follow-on hdrs and/or src lists. \"\"\" outs =", "defines: Analogous to cc_test defines argument. copts: Analogous to cc_test", "Analogous to cc_binary deps argument. srcs: Analogous to cc_binary srcs", "visibility = visibility, ) # Generate GRPC if grpc_shim_rule: gen_grpc_pb_h", "unstated. Internally, libraries and binaries are generated for every listed", "if python_support == True. Args: name: Base name for this", "if not arches: arches = ALL_ARCHES defs_plus = (defines or", "= None, testonly = None, proto_include = None, python_support =", "$(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s; fi)\" % (full_proto_include, full_proto_include), )", ") def sc_cc_lib( name, deps = None, srcs = None,", ") # Generate GRPC if grpc_shim_rule: gen_grpc_pb_h = gen_stem +", "for a given platform, {default} is used instead. A specific", "a user defined configuration fragment would be a much cleaner", "argument. srcs: Analogous to cc_test srcs argument. deps: Analogous to", "default: The value to use for any of {host,ppc,x86} that", "srcs = sc_platform_select(host = srcs or [], default = []),", "arch) # We use this filegroup to accumulate the set", "and data filesets, mapped to bin/ and share/ respectively. *", "arches. EMBEDDED_PPC Name of PowerPC arch - \"ppc\". EMBEDDED_X86 Name", "gen_stem + \".pb.cc\" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds = bash_vars + [", "Message protobuf(s). For library \"name\", generates: * ${name}_shim aka .pb.h", "[], arches, ), data = sc_platform_filter(data, [], arches), visibility =", "\"%s_%s\" % (label, suffix) # Creates a relative filename from", "\"fs\") for extension, inputs in [ (\"bin\", [\"%s.stripped\" % b", "different and allow all to be generated and addressed independently.", "result of the alias for any of {host,ppc,x86} that isn't", "as \"-I\" compilation options. 
include_prefix: Analogous to cc_library include_prefix argument.", "\".grpc.pb.h\" gen_grpc_pb_cc = gen_stem + \".grpc.pb.cc\" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds =", "grpc_gen_hdrs + gen_hdrs sc_cc_lib( name = decorate(grpc_name, arch), deps =", "or []) shim_rule = _gen_proto_shims( name = name, pb_modifier =", "If no selection is provided for a given platform and", "[]) srcs = depset(srcs or []) hdrs = depset(hdrs or", "hdrs: if src.endswith(\".proto\"): src_stem = src[0:-6] src_arch = \"%s_%s\" %", "build \"-g\", # Don't use this for program-sizing build \"-Wall\",", "\"//stratum:__subpackages__\", ] # # Build options for all embedded architectures", "else: # bar -> bar_suffix return \"%s_%s\" % (label, suffix)", "Paths to add as \"-I\" compilation options. include_prefix: Analogous to", "sc_platform_filter(srcs, [], arches), copts = copts, defines = defs_plus, includes", "proto_src_loc = proto_src_loc[len(full_proto_include) + 1:] else: print(\"Invalid proto include '%s'", "to cc_library hdrs argument. arches: List of architectures to generate", "srcs, hdrs = [shim_rule] + hdrs, deps = deps, arch", "invokes this once per (listed) arch; # which then calls", "= visibility, ) register_extension_info( extension_name = \"sc_cc_bin\", label_regex_for_dep = \"{extension_name}\",", "parameter. \"\"\" deps = depset(deps or []) srcs = depset(srcs", "Generate messages gen_pb_h = gen_stem + \".pb.h\" gen_pb_cc = gen_stem", "srcs = proto_rollups, outs = [my_proto_rollup], cmd = proto_rollup_cmd, visibility", "= False, services = []): \"\"\"Public macro to build multi-arch", "proto_include, grpc_shim_rule): \"\"\"Creates rules and filegroups for embedded protobuf library.", "-p %s\" % temp_prefix, ] + proto_path_cmds + [ \"", "target platform architecture. If no selection is provided for a", "arch in EMBEDDED_ARCHES: grpc_plugin = _SC_GRPC_PLUGIN else: grpc_plugin = _GRPC_PLUGIN", "the generated alias. 
Returns: Name of shim rule for use", "None, srcs = None, deps = None, data = None,", "== None or x86 == None): fail(\"Missing a select value", "# Embedded build definitions. ============================================== EMBEDDED_PPC = \"ppc\" EMBEDDED_X86 =", "\".join([ \"if [[ -e %s ]]\" % (\"%s/%s\" % (full_proto_include,", "= size or \"small\", srcs = sc_platform_select(host = srcs or", "switch for sc_proto_lib. For each src path.proto, generates path.pb.h consisting", "None, includes = None, include_prefix = None, strip_include_prefix = None,", "= 2, deps = [regular_proto_name], visibility = visibility, testonly =", "defines, copts = copts, linkopts = linkopts, visibility = visibility,", "= visibility, testonly = testonly, ) register_extension_info( extension_name = \"sc_proto_lib\",", "= visibility, ) outs = [\"%s.tar.gz\" % name] # Copy", "options. testonly: Standard blaze testonly parameter. visibility: Standard blaze visibility", "= filename[0:-6] new_hdr_name = hdr_stem + hdr_ext outs.append(new_hdr_name) # Generate", "deps argument. data: Analogous to cc_test data argument. defines: Analogous", "[[ -e %s ]]; then echo -IG3LOC/%s; fi)\" % (full_proto_include,", "def _normpath(path): \"\"\"Normalize a path. Normalizes a path by removing", "use this, load() this file in a BUILD file, specifying", "sc_cc_lib Declare a portable Library. sc_proto_lib Declare a portable .proto", "extension_name = \"sc_cc_bin\", label_regex_for_dep = \"{extension_name}\", ) # Protobuf =================================================================", "any of {host,ppc,x86} that isn't specified. visibility: The visibility of", "them them up into a usable library; note that ${src}_${arch}_erpc_proto", "d in (\"\", \".\"): if result: continue elif d ==", "= defines, copts = copts, linkopts = linkopts, visibility =", "% (label, label.split(\"/\")[-1], suffix) else: # bar -> bar_suffix return", "Don't mangle paths and confuse blaze. 
\"-fno-builtin-malloc\", # We'll use", "arches are disjoint. sc_cc_lib( name = decorate(name, arch), deps =", "commands. Args: target: Blaze target name available to this build.", "\" \".join([protoc] + gendir_include + proto_rollup_flags + [ \"-I%s\" %", "= depset(deps or []) shim_rule = _gen_proto_shims( name = name,", "invocations. _EMBEDDED_FLAGS = [ \"-O0\", # Don't use this for", "EMBEDDED_ARCHES = [ EMBEDDED_PPC, EMBEDDED_X86, ] HOST_ARCH = \"host\" HOST_ARCHES", "= sc_platform_filter(copts, [], arches), defines = defs_plus, includes = sc_platform_filter(includes,", "srcs = grpc_gen_srcs, hdrs = hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule],", "cleaner solution. Currently supported architectures: ppc x86 \"\"\" load(\"//tools/build_defs/label:def.bzl\", \"parse_label\")", "linkopts = None, visibility = None): \"\"\"Creates a cc_test rule", "or []) textual_hdrs = depset(textual_hdrs or []) if srcs: if", "and share/ respectively. * ${name}_${arch}_tarball rule builds that .tar.gz package.", "%s)\" % _loc(my_proto_rollup)] proto_rollup_flags = [\"$${rollup}\"] if proto_include: # We'll", "extension_name = \"sc_cc_lib\", label_regex_for_dep = \"{extension_name}\", ) def sc_cc_bin( name,", "this library. Only generated if python_support == True. Args: name:", "builds. ppc: The value to use for ppc builds. x86:", "parameter, passed through to subsequent rules. testonly: Standard blaze testonly", "\"PPC\", include_fmt % \"ppc\", \"#elif defined(STRATUM_ARCH_%s)\" % \"X86\", include_fmt %", "filesets, mapped to bin/ and share/ respectively. * ${name}_${arch}_tarball rule", "%s)\" % target def _gen_proto_lib( name, srcs, hdrs, deps, arch,", "host: The value to use for host builds. ppc: The", "the appropriate target. If no selection is provided for a", "our .proto path accordingly. proto_src_loc = \"%s/%s\" % (native.package_name(), src)", "default = None): \"\"\"Public macro to alter blaze rules based", "srcs argument. hdrs: Analogous to cc_library hdrs argument. 
arches: List", "deps = [], arches = [], visibility = None, testonly", "cc_library strip_include_prefix argument. data: Files to provide as data at", "The result of the alias for x86 builds. default: The", "this proto. native.filegroup( name = decorate(name_arch, \"headers\"), srcs = hdrs", "= host, ppc = ppc, x86 = x86, ), visibility", "visibility, testonly = testonly, ) # TODO(unknown): Add support for", "to build this library for, None => ALL. visibility: Standard", "based on a textual list of arches. def sc_platform_filter(value, default,", "[ (\"bin\", [\"%s.stripped\" % b for b in bins.to_list()]), (\"data\",", "data at runtime (host builds only). testonly: Standard blaze testonly", "= decorate(name[:-6], \"grpc_proto\"), pb_modifier = \".grpc.pb\", srcs = srcs +", "libs for our ppc # executables. ppc = [native.FilesetEntry( srcdir", "srcs: List of proto files. arches: List of arches this", "[grpc_shim_rule], arches = [arch], copts = [], includes = includes,", "= [], ) native.Fileset( name = fileset_name, out = name,", "if [s for s in srcs.to_list() if not s.endswith(\".h\")]: alwayslink", "for s in srcs.to_list() if not s.endswith(\".h\")]: alwayslink = 1", "Args: name: Analogous to cc_library name argument. deps: Analogous to", "= [gen_pb_h, gen_pb_cc] native.genrule( name = src_arch + \".pb\", srcs", "= include_prefix, strip_include_prefix = strip_include_prefix, testonly = testonly, textual_hdrs =", "squotes around a string. 
def squote(s): return \"'\" + s", "def dquote(s): return '\"' + s + '\"' # Adds", "[native.FilesetEntry( srcdir = \"%s:BUILD\" % _PPC_GRTE, files = [\":libs\"], destdir", "name = name, size = size or \"small\", srcs =", "decorate(name, arch) # We use this filegroup to accumulate the", "in accum_flags] proto_rollup_cmds.append(\"cat $(SRCS)\") proto_rollup_cmd = \"{ %s; } |", "on sc_proto_libs that also have python support, and may not", "The key difference: you can supply lists of architectures for", "[]) data = depset(data or []) textual_hdrs = depset(textual_hdrs or", "arches, visibility = visibility, ) grpc_shim_rule = None if (service_enable[\"grpc\"]):", "Generates an sc_platform_select based on a textual list of arches.", "cc_library include_prefix argument. strip_include_prefix: Analogous to cc_library strip_include_prefix argument. data:", "deps: Standard blaze deps argument. visibility: Standard blaze visibility argument.", "visibility, ) # Add any platform specific files to the", "\"bin\"), destdir = \"bin\", ), native.FilesetEntry( srcdir = decorate(name, \"data\"),", "to package binaries and data for deployment. For package \"name\",", "value to use for ppc builds. 
x86: The value to", "= visibility, ) # Generate GRPC if grpc_shim_rule: gen_grpc_pb_h =", "register_extension_info( extension_name = \"sc_cc_test\", label_regex_for_dep = \"{extension_name}\", ) def sc_cc_lib(", "\"'\" # Emulate Python 2.5+ str(startswith([prefix ...]) def starts_with(s, prefix_list):", "deps = sc_platform_filter( deps, [\"//stratum/portage:dummy_with_main\"], arches, ), srcs = sc_platform_filter(srcs,", "each set into sc_cc_lib to wrap them them up into", "full_proto_include, \"fi\", ])) gendir_include = [\"-I$${g3}/$(GENDIR)\", \"-I$${g3}\", \"-I.\"] else: proto_src_loc", "= EMBEDDED_ARCHES fileset_name = decorate(name, \"fs\") for extension, inputs in", "that doesn't break the build when an embedded arch is", "supply lists of architectures for which they should be compiled", "STRATUM_INTERNAL For declaring Stratum internal visibility. The macros are like", "= None, xdeps = None): \"\"\"Creates rules for the given", "directory before protoc, so # adjust our .proto path accordingly.", "hdrs = [], deps = [], arches = [], visibility", "support may only depend on sc_proto_libs that also have python", "absolute or relative path to normalize. Returns: A path equivalent", "= decorate(name, \"ppc\") if \"ppc\" in arches else None, x86", "= decorate(name, \"shims\") native.genrule( name = shim_rule, srcs = srcs,", "= [shim_rule] + hdrs, deps = deps, arch = arch,", "regular_proto_name, srcs = srcs, deps = [decorate(dep, \"default_pb\") for dep", "None, deps = None, arches = None, visibility = None):", "+ gen_hdrs, arches = [arch], copts = [], includes =", "decorated to keep them different and allow all to be", "# executables. 
ppc = [native.FilesetEntry( srcdir = \"%s:BUILD\" % _PPC_GRTE,", "= sc_platform_select( host = deps or [], default = [\"//stratum/portage:dummy_with_main\"],", "= grpc_tools, cmd = \" && \".join(cmds), heuristic_label_expansion = 0,", "None, data = None, testonly = None, textual_hdrs = None,", "add.\") config_label_prefix = \"//stratum:stratum_\" return select({ \"//conditions:default\": (host or default),", "value if \"ppc\" in arches else default, x86 = value", "should support. visibility: The blaze visibility of the generated alias.", "\"proto_rollup.flags\") for dep in deps if dep.endswith(\"_proto\") ] proto_rollup_cmds =", "argument. deps: Analogous to cc_library deps argument. srcs: Analogous to", "\"ppc\". EMBEDDED_X86 Name of \"x86\" arch. HOST_ARCH Name of default", "sort -u -o $(@)\" % \"; \".join(proto_rollup_cmds) native.genrule( name =", "continue else: level += 1 result.append(d) return sep.join(result) # Adds", "rule. Typically \".\" python_support: Defaults to False. If True, generate", "+ [ \"-I%s\" % protobuf_include, \"--grpc-cpp_out=%s\" % cpp_out, proto_src_loc, ]),", "> 0: result.pop() level += -1 continue else: level +=", "name for this library. 
pb_modifier: protoc plugin-dependent file extension (e.g.:", "= None, defines = None, copts = None, linkopts =", "for program-sizing build #-- \"-Os\", # Use this for program-sizing", "default, x86 = value if \"x86\" in arches else default,", "sc_platform_alias( name, host = None, ppc = None, x86 =", "[]) if not arches: arches = ALL_ARCHES defs_plus = (defines", "\"x86\" in arches else None, visibility = visibility, ) return", "of this library - no decorations assumed, used and exported", "accum_flags.append( \"$$(if [[ -e %s ]]; then echo -IG3LOC/%s; fi)\"", "and (host == None or ppc == None or x86", "proto_src_loc = \"%s/%s\" % (native.package_name(), src) if proto_src_loc.startswith(full_proto_include + \"/\"):", "cd $(GENDIR)\" % proto_src_loc) gendir_include = [\"-I$(GENDIR)\", \"-I.\"] # Generate", "sc_cc_lib and sc_proto_lib rules that depend on this rule. Typically", "depset(deps) | [protobuf_label] includes = [] if proto_include: includes =", "visibility = visibility, ) # Embedded build definitions. ============================================== EMBEDDED_PPC", "else: proto_src_loc = \"%s/%s\" % (native.package_name(), src) proto_path_cmds.append(\"[[ -e %s", "an alias that will select the appropriate proto target based", "with different options and some restrictions. The key difference: you", "to add as \"-I\" compilation options. include_prefix: Analogous to cc_library", "\" && \".join(cmds) or \"true\", ) sc_platform_alias( name = name,", "files (and file producing rules) to be packaged. deps: List", "visibility, testonly): \"\"\"Creates a py_proto_library from the given srcs. There's", "label.endswith(\":\"): # .../bar: -> .../bar label = label[:-1] if \":\"", "we keep this simple. 
For library \"name\", generates: * ${name}_default_pb,", "= \"%s/%s\" % (cpp_out, native.package_name()) proto_rollups = [ decorate(decorate(dep, arch),", "None, arches = None, visibility = None): \"\"\"Public macro to", "= grpc_gen_hdrs_plus, visibility = visibility, ) def _gen_proto_shims(name, pb_modifier, srcs,", "\"ppc\": (ppc or default), config_label_prefix + \"x86\": (x86 or default),", "if \"host\" in arches else None, ppc = decorate(name, \"ppc\")", "this build. Returns: $(location target) \"\"\" return \"$(location %s)\" %", "= [], deps = [], arches = [], visibility =", "if python_support: if proto_include: fail(\"Cannot use proto_include on an sc_proto_lib", "# Used for all gcc invocations. _EMBEDDED_FLAGS = [ \"-O0\",", "to wrap them them up into a usable library; note", "cc_library srcs argument. hdrs: Analogous to cc_library hdrs argument. arches:", "fileset containing the corresponding bin and data filesets, mapped to", "hdr_stem + \".%s\" + hdr_ext) lines = [ \"#if defined(STRATUM_ARCH_%s)\"", "$${TEMP_DIR}/tarball\" % _loc(fileset_name), \"if [[ -e $${TEMP_DIR}/tarball/bin ]]\", \"then for", "one platform in \" + \"sc_platform_select. Please add.\") config_label_prefix =", "# multiple such calls are OK as long as the", "would be a much cleaner solution. Currently supported architectures: ppc", "different symbols. Careful, our proto might be in GENDIR! proto_path_cmds.append(\";", "data: List of files (and file producing rules) to be", "[], arches), alwayslink = alwayslink, copts = sc_platform_filter(copts, [], arches),", "else: grpc_plugin = _GRPC_PLUGIN protoc_deps = [] for dep in", "% (full_proto_include, full_proto_include), ) else: temp_prefix = \"%s/%s\" % (cpp_out,", "srcs = srcs, outs = outs, cmd = \" &&", "augmenting context with a user defined configuration fragment would be", "for Stratum P4 switch stack. To use this, load() this", "Base name for this library. 
pb_modifier: protoc plugin-dependent file extension", "register_extension_info( extension_name = \"sc_cc_lib\", label_regex_for_dep = \"{extension_name}\", ) def sc_cc_bin(", "name, srcs = srcs, hdrs = [shim_rule] + hdrs, deps", "register_extension_info( extension_name = \"sc_proto_lib\", label_regex_for_dep = \"{extension_name}\", ) def sc_package(", "full_proto_include, \"else cd $(GENDIR)/%s\" % full_proto_include, \"fi\", ])) gendir_include =", "used instead. A specific value or default must be provided", "[ decorate(decorate(dep, arch), \"proto_rollup.flags\") for dep in deps if dep.endswith(\"_proto\")", "# Used for linking binaries. _EMBEDDED_LDFLAGS = [ # \"-static\",", "suffix) elif label.startswith(\"//\"): # //foo/bar -> //foo/bar:bar_suffix return \"%s:%s_%s\" %", "to cc_test size argument. srcs: Analogous to cc_test srcs argument.", "[ native.FilesetEntry( files = inputs, ), ] + [ native.FilesetEntry(srcdir", "Analogous to cc_test visibility argument. \"\"\" cc_test( name = name,", "None, strip_include_prefix = None, data = None, testonly = None,", "hdrs argument. arches: List of architectures to generate this way.", "argument. visibility: Standard blaze visibility argument. testonly: Standard blaze testonly", "file in a BUILD file, specifying the symbols needed. The", "tarballing. cmds = [ \"TEMP_DIR=$(@D)/stratum_packaging_temp\", \"mkdir $${TEMP_DIR}\", \"cp -r %s", "= bash_vars + [ \"mkdir -p %s\" % temp_prefix, ]", "exported as header, not for flags, libs, etc. \"\"\" alwayslink", "= None): \"\"\"Creates rules for the given portable binary and", "protobuf to using the proto_include mechanism protoc_label = _PROTOC protobuf_label", "Analogous to cc_test srcs argument. deps: Analogous to cc_test deps", "as \"-D\" compilation options. includes: Paths to add as \"-I\"", "cc_test copts argument. linkopts: Analogous to cc_test linkopts argument. 
visibility:", "selection is provided for a given platform, {default} is used", "python_support = False, services = []): \"\"\"Public macro to build", "\"\"\"Normalize a path. Normalizes a path by removing unnecessary path-up", "a given platform, {default} is used instead. A specific value", "We do a bit of extra work with these include", "select value for at least one platform in \" +", "\"{extension_name}\", ) def sc_package( name = None, bins = None,", "Declare a portable .proto Library. sc_cc_bin Declare a portable Binary.", "= [ EMBEDDED_PPC, EMBEDDED_X86, ] HOST_ARCH = \"host\" HOST_ARCHES =", "grpc_plugin = \"$${g3}/%s\" % _loc(grpc_plugin) cpp_out = \"$${g3}/$(GENDIR)/%s/%s\" % (native.package_name(),", "None, testonly = None, proto_include = None, python_support = False,", "will select the appropriate target. If no selection is provided", "dependencies appear as a `src'. # TODO(unknown): if useful again", "# Adds squotes around a string. def squote(s): return \"'\"", "Typically \".\" python_support: Defaults to False. If True, generate a", "] # Used for C++ compiler invocations. _EMBEDDED_CXXFLAGS = [", "name argument. deps: Analogous to cc_library deps argument. srcs: Analogous", "return \"$(location %s)\" % target def _gen_proto_lib( name, srcs, hdrs,", "dummy default target is used instead. Args: name: The name", "The name of the alias target. host: The result of", "+ grpc_gen_hdrs_plus + [grpc_shim_rule], arches = [arch], copts = [],", "\"x86\") if \"x86\" in arches else None, visibility = visibility,", "defined(STRATUM_ARCH_%s)\" % \"PPC\", include_fmt % \"ppc\", \"#elif defined(STRATUM_ARCH_%s)\" % \"X86\",", "value if \"x86\" in arches else default, ) def sc_platform_alias(", "x86 builds. 
default: The result of the alias for any", "% cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp %s.pb.h %s\" %", "\"\"\" return \"$(location %s)\" % target def _gen_proto_lib( name, srcs,", "into this library, but also exported for dependent rules to", "src) proto_path_cmds.append(\"[[ -e %s ]] || cd $(GENDIR)\" % proto_src_loc)", "srcs = depset(srcs or []) if not arches: arches =", "visibility: The blaze visibility of the generated alias. Returns: Name", "srcs = [\":%s\" % fileset_name], outs = outs, cmd =", "= depset(includes or []) data = depset(data or []) textual_hdrs", "= [\":%s\" % fileset_name], outs = outs, cmd = \";", "filegroups for embedded protobuf library. For every given ${src}.proto, generate:", "\"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\" _GRPC_PLUGIN = \"//grpc:grpc_cpp_plugin\" def _loc(target): \"\"\"Return target location for", "[]) + _ARCH_DEFINES cc_binary( name = name, deps = sc_platform_filter(", "testonly = testonly, textual_hdrs = gen_hdrs, visibility = visibility, )", "on target arch. Generates a blaze alias that will select", "Generate GRPC if grpc_shim_rule: gen_grpc_pb_h = gen_stem + \".grpc.pb.h\" gen_grpc_pb_cc", "this file in a BUILD file, specifying the symbols needed.", "packaged. data: List of files (and file producing rules) to", "+ hdrs, arches = arches, visibility = visibility, ) grpc_shim_rule", "on another forge server. proto_path_cmds = [\"rollup=$$(sed \\\"s,G3LOC,$${PWD},g\\\" %s)\" %", "Args: name: Analogous to cc_binary name argument. deps: Analogous to", ":${src}_${arch}.pb rule to run protoc ${src}.proto => ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule", "= [ \"-I$(GENDIR)\", ] # Used for C++ compiler invocations.", "OK as long as the arches are disjoint. 
sc_cc_lib( name", "1 elif service == \"rpc\": service_enable[\"grpc\"] = 1 else: fail(\"service='%s'", "\"cp %s.pb.cc %s\" % (temp_stem, _loc(gen_pb_cc)), ] pb_outs = [gen_pb_h,", "\"%s:BUILD\" % _PPC_GRTE, files = [\":libs\"], destdir = \"lib/stratum\", symlinks", "(host == None or ppc == None or x86 ==", "ppc = value if \"ppc\" in arches else default, x86", "proto_src_loc, ]), \"cd $${g3}\", \"cp %s.pb.h %s\" % (temp_stem, _loc(gen_pb_h)),", "* ${name}_py a py_proto_library version of this library. Only generated", "\"//conditions:default\": (host or default), config_label_prefix + \"ppc\": (ppc or default),", "GNU extensions. ] # Used for linking binaries. _EMBEDDED_LDFLAGS =", "if srcs: if [s for s in srcs.to_list() if not", "Defaults to False. If True, generate a python proto library", "a BUILD file, specifying the symbols needed. The public symbols", "= \"bin\", ), native.FilesetEntry( srcdir = decorate(name, \"data\"), destdir =", "Standard blaze name argument. srcs: Standard blaze srcs argument. deps:", "temp_prefix, ] + proto_path_cmds + [ \" \".join([protoc] + gendir_include", "We use a different ppc toolchain for Stratum. # This", "make any necessary changes # before tarballing. cmds = [", "= pb_outs, tools = tools, cmd = \" && \".join(cmds),", "label.startswith(\"//\"): # //foo/bar:bat/baz -> google3_foo/bar/bat/baz return label.replace(\"//\", \"google3/\").replace(\":\", \"/\") elif", "+ \".tar.gz\"), \"rm -rf $${TEMP_DIR}\", ] native.genrule( name = decorate(name,", "the arches are disjoint. sc_cc_lib( name = decorate(name, arch), deps", "implicit targets if needed. def decorate(label, suffix): if label.endswith(\":\"): #", "+ hdrs + protoc_deps + protobuf_srcs + [my_proto_rollup]) gen_srcs =", "of this. \"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\", ] # Used for C and C++", "[[ -e $(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s; fi)\" % (full_proto_include,", "generated on another forge server. 
proto_path_cmds = [\"rollup=$$(sed \\\"s,G3LOC,$${PWD},g\\\" %s)\"", "* ${name}_${arch} fileset containing the corresponding bin and data filesets,", "For library \"name\", generates: * ${name}_default_pb, a regular proto library.", "STRATUM_INTERNAL = [ \"//stratum:__subpackages__\", ] # # Build options for", "x86 = None, default = None): \"\"\"Public macro to alter", "these include flags to avoid generating # warnings. accum_flags.append( \"$$(if", "visibility, ) if grpc_shim_rule: grpc_name = name[:-6] + \"_grpc_proto\" grpc_dep_set", "proto_path_cmds = [\"rollup=$$(sed \\\"s,G3LOC,$${PWD},g\\\" %s)\" % _loc(my_proto_rollup)] proto_rollup_flags = [\"$${rollup}\"]", "allowed in build defs. For example ../../dir/to/deeply/nested/path/../../../other/path will become ../../dir/to/other/path", "to the final tarball. platform_entries = sc_platform_select( # We use", "= [protobuf_hdrs] protobuf_include = \"$${g3}/protobuf/src\" if arch in EMBEDDED_ARCHES: grpc_plugin", "not filename.endswith(\".proto\"): continue hdr_stem = filename[0:-6] new_hdr_name = hdr_stem +", "visibility, ) register_extension_info( extension_name = \"sc_cc_test\", label_regex_for_dep = \"{extension_name}\", )", "def _gen_proto_lib( name, srcs, hdrs, deps, arch, visibility, testonly, proto_include,", "= \"/\" level = 0 result = [] for d", "\"-D__STDC_FORMAT_MACROS=1\", # TODO(unknown): Figure out how we can use $(CC_FLAGS)", "platform and no default is set, a dummy default target", "copts = copts, linkopts = linkopts, visibility = visibility, )", "this proto lib. \"\"\" bash_vars = [\"g3=$${PWD}\"] # TODO(unknown): Switch", "defaults to all if left unstated. Internally, libraries and binaries", "architectures to build this library for, None => EMBEDDED_ARCHES (HOST_ARCHES", "for all gcc invocations. 
_EMBEDDED_FLAGS = [ \"-O0\", # Don't", "full_proto_include = native.package_name() elif proto_include: full_proto_include = \"%s/%s\" % (native.package_name(),", "collects include options for protoc: ${src}_${arch}_proto_rollup.flags Feed each set into", "<reponame>cholve/stratum<filename>stratum/portage/build_defs.bzl # Copyright 2018 Google LLC # Copyright 2018-present Open", "until this step, because our rollup command # might be", "cc_test data argument. defines: Analogous to cc_test defines argument. copts:", "valid absolute or relative path to normalize. Returns: A path", "size = size or \"small\", srcs = sc_platform_select(host = srcs", "and binaries are generated for every listed architecture. The names", "program-sizing build # \"-Wl,--gc-sections,--no-wchar-size-warning\", # Use this for program-sizing build", "rule that doesn't break the build when an embedded arch", "= depset(data or []) textual_hdrs = depset(textual_hdrs or []) if", "= _PROTOBUF protobuf_hdrs = \"%s:well_known_types_srcs\" % protobuf_label protobuf_srcs = [protobuf_hdrs]", "platform_entries, visibility = visibility, ) outs = [\"%s.tar.gz\" % name]", "\"\"\"Macro to build .pb.h multi-arch master switch for sc_proto_lib. For", "respectively all of the binaries and all of the data", "(src_stem, arch) # We can't use $${PWD} until this step,", "\":\" in label: # .../bar:bat -> .../bar:bat_suffix return \"%s_%s\" %", "config_label_prefix + \"x86\": (x86 or default), }) # Generates an", "bins = None, data = None, deps = None, arches", "to generate this way. 
copts: Analogous to cc_binary copts argument.", "= protoc_srcs_set, outs = grpc_pb_outs, tools = grpc_tools, cmd =", "gen_pb_h = gen_stem + \".pb.h\" gen_pb_cc = gen_stem + \".pb.cc\"", "[ # \"-static\", # Use this for program-sizing build #", "default), config_label_prefix + \"x86\": (x86 or default), }) # Generates", "= [\"-ldl\", \"-lutil\"], testonly = testonly, visibility = visibility, )", "= gen_srcs, hdrs = hdrs + gen_hdrs, arches = [arch],", "= decorate(name, \"data\"), destdir = \"share\", ), ] + platform_entries,", "%s\" % (temp_stem, _loc(gen_grpc_pb_h)), \"cp %s.grpc.pb.cc %s\" % (temp_stem, _loc(gen_grpc_pb_cc)),", "for each arch. * sc_cc_lib(name) with those as input. *", "to cc_test srcs argument. deps: Analogous to cc_test deps argument.", "if \"host\" in arches else default, ppc = value if", "for dep in deps], visibility = visibility, testonly = testonly,", "visibility = visibility, ) outs = [\"%s.tar.gz\" % name] #", "copts: Analogous to cc_binary copts argument. defines: Symbols added as", "files = [\":libs\"], destdir = \"lib/stratum\", symlinks = \"dereference\", )],", "or []) if not arches: arches = EMBEDDED_ARCHES fileset_name =", "\"//\" and \":\". def _make_filename(label): if label.startswith(\"//\"): # //foo/bar:bat/baz ->", "= srcs, deps = [decorate(dep, \"default_pb\") for dep in deps],", "deps: List of deps for this library arches: Which architectures", "= None, srcs = None, deps = None, data =", ".pb.h master switch - see _gen_proto_shims, above. * ${name}_${arch}_pb protobuf", "default = None, visibility = None): \"\"\"Public macro to create", "proto_include: # We'll be cd-ing to another directory before protoc,", "rule builds that .tar.gz package. Args: name: Base name for", "use this for program-sizing build #-- \"-Os\", # Use this", "generated if python_support == True. Args: name: Base name for", "a string. 
def dquote(s): return '\"' + s + '\"'", "% full_proto_include, \"fi\", ])) gendir_include = [\"-I$${g3}/$(GENDIR)\", \"-I$${g3}\", \"-I.\"] else:", "or []) if not arches: arches = ALL_ARCHES defs_plus =", "fail(\"Cannot use proto_include on an sc_proto_lib with python support.\") _gen_py_proto_lib(", "% \"PPC\", include_fmt % \"ppc\", \"#elif defined(STRATUM_ARCH_%s)\" % \"X86\", include_fmt", "None, proto_include = None, python_support = False, services = []):", "or []) if srcs: if [s for s in srcs.to_list()", "field in this rule. services: List of services to enable", "% service) deps = depset(deps or []) shim_rule = _gen_proto_shims(", ") def sc_platform_alias( name, host = None, ppc = None,", "calls sc_cc_lib with same name for each arch; # multiple", "def sc_cc_bin( name, deps = None, srcs = None, arches", "or [], defines = defines, copts = copts, linkopts =", "`src'. # TODO(unknown): if useful again then inject from cmdline", "testonly = testonly, visibility = visibility, ) register_extension_info( extension_name =", "defined configuration fragment would be a much cleaner solution. Currently", "library. * ${name}_py, a py_proto_library based on ${name}_default_pb. Args: name:", "the final tarball. platform_entries = sc_platform_select( # We use a", "label_regex_for_dep = \"{extension_name}\", ) # Protobuf ================================================================= _SC_GRPC_DEPS = [", "ppc builds. x86: The value to use for x86 builds.", "used in most contexts to alter a blaze rule based", "= \"{extension_name}\", ) # Protobuf ================================================================= _SC_GRPC_DEPS = [ \"//sandblaze/prebuilt/grpc\",", "proto_include: includes = [proto_include] # Note: Public sc_proto_lib invokes this", "0, } for service in services or []: if service", "} for service in services or []: if service ==", "the platform architecture. Generates a blaze select(...) 
statement that can", "# We use this filegroup to accumulate the set of", "\".join(proto_rollup_cmds) native.genrule( name = decorate(name_arch, \"proto_rollup\"), srcs = proto_rollups, outs", "segments and its corresponding directories. Providing own implementation because import", "embedded dependencies appear as a `src'. # TODO(unknown): if useful", "to show sources in embedded sc_cc_lib compile steps. # This", "include_fmt % \"x86\", \"#elif defined(STRATUM_ARCH_%s)\" % \"HOST\", include_fmt % \"host\",", "fi)\" % (full_proto_include, full_proto_include), ) accum_flags.append( \"$$(if [[ -e %s", "= \"//grte/v4_x86/release/usr/grte/v4\" # Portability definitions =================================================== def sc_cc_test( name, size", "cc_test size argument. srcs: Analogous to cc_test srcs argument. deps:", ".../bar: -> .../bar label = label[:-1] if \":\" in label:", "when an embedded arch is selected. During embedded builds this", "provide as data at runtime (host builds only). testonly: Standard", "sc_cc_lib with same name for each arch; # multiple such", "a given platform and no default is set, a dummy", "copts, defines = defs_plus, includes = includes, linkopts = [\"-ldl\",", "% cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp %s.grpc.pb.h %s\" %", "in arches else None, x86 = decorate(name, \"x86\") if \"x86\"", "+ [ \"-I%s\" % protobuf_include, \"--cpp_out=%s\" % cpp_out, proto_src_loc, ]),", "=> EMBEDDED_ARCHES (HOST_ARCHES not generally supported). visibility: Standard blaze visibility", "embedded sc_cc_lib compile steps. # This is more general than", "====================================================================== _X86_GRTE = \"//grte/v4_x86/release/usr/grte/v4\" # Portability definitions =================================================== def sc_cc_test(", "deps = None, data = None, defines = None, copts", "rule for use in follow-on hdrs and/or src lists. 
\"\"\"", "or \"true\", ) sc_platform_alias( name = name, host = decorate(name,", "dquotes around a string. def dquote(s): return '\"' + s", "of path-up segments. Invalid input paths will stay invalid. \"\"\"", "argument. deps: Analogous to cc_binary deps argument. srcs: Analogous to", "name, host = decorate(name, \"host\") if \"host\" in arches else", "\"cc_library\", \"cc_test\") # Generic path & label helpers. ============================================ def", "| depset(deps.to_list()) cc_library( name = name, deps = sc_platform_filter(deps, [],", "gen_pb_cc] native.genrule( name = src_arch + \".pb\", srcs = protoc_srcs_set,", "= depset(copts or []) includes = depset(includes or []) data", "any necessary changes # before tarballing. cmds = [ \"TEMP_DIR=$(@D)/stratum_packaging_temp\",", "\"\"\" cc_test( name = name, size = size or \"small\",", "protobuf_include, \"--grpc-cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp %s.grpc.pb.h", "${name}_${arch}_bin and ${name}_${arch}_data filesets containing respectively all of the binaries", "for this library. srcs: List of proto files hdrs: More", "a cc_test rule that interacts safely with Stratum builds. Generates", "None, visibility = None): \"\"\"Public macro to create an alias", "Standard blaze deps argument. visibility: Standard blaze visibility argument. testonly:", "deployment. For package \"name\", generates: * ${name}_${arch}_bin and ${name}_${arch}_data filesets", "%s $${TEMP_DIR}/tarball\" % _loc(fileset_name), \"if [[ -e $${TEMP_DIR}/tarball/bin ]]\", \"then", "+ \".grpc.pb\", srcs = protoc_srcs_set, outs = grpc_pb_outs, tools =", "None): fail(\"Missing a select value for at least one platform", "a py_proto_library based on ${name}_default_pb. Args: name: Standard blaze name", "= 0, visibility = visibility, ) # Generate GRPC if", "Standard blaze testonly parameter. 
proto_include: Include path for generated sc_cc_libs.", "Use this for program-sizing build # \"-Wl,--gc-sections,--no-wchar-size-warning\", # Use this", "|| cd $(GENDIR)\" % proto_src_loc) gendir_include = [\"-I$(GENDIR)\", \"-I.\"] #", "Analogous to cc_library deps argument. srcs: Analogous to cc_library srcs", "= py_name, api_version = 2, deps = [regular_proto_name], visibility =", "to add to include path. This will affect the symbols", "supported. So \"rpc\" and \"grpc\" are equivalent. \"\"\" if not", "\"%s:well_known_types_srcs\" % protobuf_label protobuf_srcs = [protobuf_hdrs] protobuf_include = \"$${g3}/protobuf/src\" if", "# .../bar: -> .../bar label = label[:-1] if \":\" in", "Don't use this for program-sizing build #-- \"-Os\", # Use", "None, bins = None, data = None, deps = None,", "A specific value or default must be provided for every", "path with minimal use of path-up segments. Invalid input paths", "heuristic_label_expansion = 0, visibility = visibility, ) dep_set = depset(deps)", "deps = grpc_dep_set, srcs = grpc_gen_srcs, hdrs = hdrs +", "must provide portable shared libs for our ppc # executables.", "testonly: arches = HOST_ARCHES else: arches = ALL_ARCHES service_enable =", "a path by removing unnecessary path-up segments and its corresponding", "Standard blaze visibility parameter. xdeps: External (file) dependencies of this", "utilize. deps: List of deps for this library arch: Which", "${name}_${arch}_tarball rule builds that .tar.gz package. Args: name: Base name", "sc_platform_filter(copts, [], arches), defines = defs_plus, includes = sc_platform_filter(includes, [],", "for the given portable binary and arches. Args: name: Analogous", "master switch - see _gen_proto_shims, above. * ${name}_${arch}_pb protobuf compile", "= None, default = None, visibility = None): \"\"\"Public macro", "alias for x86 builds. 
default: The result of the alias", "accum_flags.append( \"$$(if [[ -e $(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s; fi)\"", "\"X86\", include_fmt % \"x86\", \"#elif defined(STRATUM_ARCH_%s)\" % \"HOST\", include_fmt %", "\"-fno-builtin-realloc\", \"-fno-builtin-free\", \"-D__STDC_FORMAT_MACROS=1\", # TODO(unknown): Figure out how we can", "into a usable library; note that ${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto.", "x86 == None): fail(\"Missing a select value for at least", "are decorated to keep them different and allow all to", "True, generate a python proto library from this rule. Any", "By cd-ing to another directory, we force protoc to produce", "grpc_shim_rule = None if (service_enable[\"grpc\"]): grpc_shim_rule = _gen_proto_shims( name =", "List of deps for this library arch: Which architecture to", "our proto might be in GENDIR! proto_path_cmds.append(\"; \".join([ \"if [[", "used for both sc_cc_lib and sc_proto_lib rules that depend on", "name, deps = None, srcs = None, hdrs = None,", "proto_rollup_flags + [ \"-I%s\" % protobuf_include, \"--grpc-cpp_out=%s\" % cpp_out, proto_src_loc,", "Add support for depending on normal proto_library rules. def sc_proto_lib(", "= grpc_shim_rule, ) if python_support: if proto_include: fail(\"Cannot use proto_include", "no selection is provided for a given platform and no", "= EMBEDDED_ARCHES + HOST_ARCHES # Identify Stratum platform arch for", "[s for s in srcs.to_list() if not s.endswith(\".h\")]: alwayslink =", "alwayslink, copts = sc_platform_filter(copts, [], arches), defines = defs_plus, includes", "True. Args: name: Base name for this library. srcs: List", ".proto Library. sc_cc_bin Declare a portable Binary. 
sc_package Declare a", "or default), config_label_prefix + \"ppc\": (ppc or default), config_label_prefix +", "provided for a given platform and no default is set,", "program-sizing build \"-Wall\", \"-Werror\", # Warn lots, and force fixing", "usable library; note that ${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto. Args: name:", "libs, etc. \"\"\" alwayslink = 0 deps = depset(deps or", "= [] grpc_gen_srcs = [] tools = [protoc_label] grpc_tools =", "both sc_cc_lib and sc_proto_lib rules that depend on this rule.", "}) # Generates an sc_platform_select based on a textual list", "= \"%s_%s\" % (src_stem, arch) temp_stem = \"%s/%s\" % (temp_prefix,", "# Generate messages gen_pb_h = gen_stem + \".pb.h\" gen_pb_cc =", "parameter. proto_include: Include path for generated sc_cc_libs. grpc_shim_rule: If needed,", "\" + dquote(pkg + \"/\" + hdr_stem + \".%s\" +", ") outs = [\"%s.tar.gz\" % name] # Copy our files", "architecture. Args: name: Base name for this library. pb_modifier: protoc", "decorate(label, suffix): if label.endswith(\":\"): # .../bar: -> .../bar label =", "Analogous to cc_test linkopts argument. visibility: Analogous to cc_test visibility", "only). testonly: Standard blaze testonly parameter. textual_hdrs: Analogous to cc_library.", "${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects include options for protoc: ${src}_${arch}_proto_rollup.flags Feed each", "] # # Build options for all embedded architectures #", "depset(copts or []) includes = depset(includes or []) data =", "suffix to a label, expanding implicit targets if needed. def", "= \"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\" # X86 ====================================================================== _X86_GRTE = \"//grte/v4_x86/release/usr/grte/v4\" # Portability", "inject from cmdline else kill feature. 
_TRACE_SRCS = False #", "_loc(gen_grpc_pb_cc)), ] grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule( name = src_arch", "\"-g\", # Don't use this for program-sizing build \"-Wall\", \"-Werror\",", "= ppc, x86 = x86, ), visibility = visibility, )", "= HOST_ARCHES else: arches = ALL_ARCHES service_enable = { \"grpc\":", "service_enable[\"grpc\"] = 1 elif service == \"rpc\": service_enable[\"grpc\"] = 1", "features _and_ GNU extensions. ] # Used for linking binaries.", "parameter. xdeps: External (file) dependencies of this library - no", "if dep.endswith(\"_proto\"): protoc_deps.append(\"%s_%s_headers\" % (dep, arch)) name_arch = decorate(name, arch)", "(label, label.split(\"/\")[-1], suffix) else: # bar -> bar_suffix return \"%s_%s\"", "return None def sc_platform_select(host = None, ppc = None, x86", "keep them different and allow all to be generated and", "visibility, ) # Embedded build definitions. ============================================== EMBEDDED_PPC = \"ppc\"", "= src_arch + \".grpc.pb\", srcs = protoc_srcs_set, outs = grpc_pb_outs,", "= name, size = size or \"small\", srcs = sc_platform_select(host", "'\"' # Adds squotes around a string. def squote(s): return", "{host,ppc,x86} that isn't specified. visibility: The visibility of the alias", "long as the arches are disjoint. sc_cc_lib( name = decorate(name,", "around a string. def dquote(s): return '\"' + s +", "respectively. * ${name}_${arch}_tarball rule builds that .tar.gz package. Args: name:", "deps argument. visibility: Standard blaze visibility argument. testonly: Standard blaze", "sc_cc_bin( name, deps = None, srcs = None, arches =", "proto_rollup_flags = [\"$${rollup}\"] if proto_include: # We'll be cd-ing to", "native.FilesetEntry( files = inputs, ), ] + [ native.FilesetEntry(srcdir =", "${src}.proto => ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects include options for protoc: ${src}_${arch}_proto_rollup.flags", "in \" + \"sc_platform_select. 
Please add.\") config_label_prefix = \"//stratum:stratum_\" return", "isn't specified. visibility: The visibility of the alias target. \"\"\"", "Analogous to cc_test copts argument. linkopts: Analogous to cc_test linkopts", "to cc_test copts argument. linkopts: Analogous to cc_test linkopts argument.", "srcs, arches, visibility): \"\"\"Macro to build .pb.h multi-arch master switch", "\"fi\", ])) gendir_include = [\"-I$${g3}/$(GENDIR)\", \"-I$${g3}\", \"-I.\"] else: proto_src_loc =", "this, load() this file in a BUILD file, specifying the", "visibility = None): \"\"\"Public macro to create an alias that", "visibility = visibility, ) def _gen_proto_shims(name, pb_modifier, srcs, arches, visibility):", "else default, ppc = value if \"ppc\" in arches else", "[ \"-I$(GENDIR)\", ] # Used for C++ compiler invocations. _EMBEDDED_CXXFLAGS", "for embedded protobuf library. For every given ${src}.proto, generate: :${src}_${arch}.pb", "for flag in accum_flags] proto_rollup_cmds.append(\"cat $(SRCS)\") proto_rollup_cmd = \"{ %s;", "% \"HOST\", include_fmt % \"host\", \"#else\", \"#error Unknown STRATUM_ARCH\", \"#endif\",", "ppc = decorate(name, \"ppc\") if \"ppc\" in arches else None,", "_loc(name + \".tar.gz\"), \"rm -rf $${TEMP_DIR}\", ] native.genrule( name =", "_gen_proto_shims( name = decorate(name[:-6], \"grpc_proto\"), pb_modifier = \".grpc.pb\", srcs =", "may not use the proto_include field in this rule. services:", "a textual list of arches. def sc_platform_filter(value, default, arches): return", "is used instead. 
Args: name: The name of the alias", "arches): return sc_platform_select( host = value if \"host\" in arches", "= ALL_ARCHES service_enable = { \"grpc\": 0, } for service", "\".join(cmds), heuristic_label_expansion = 0, visibility = visibility, ) # Generate", "= None, defines = None, includes = None, include_prefix =", "= [] if proto_include: includes = [proto_include] # Note: Public", "= src_arch + \".pb\", srcs = protoc_srcs_set, outs = pb_outs,", "# We can't use $${PWD} until this step, because our", "for flags, libs, etc. \"\"\" alwayslink = 0 deps =", "Args: host: The value to use for host builds. ppc:", "of the data needed for this package and all dependency", "visibility = visibility, testonly = testonly, ) native.py_proto_library( name =", "sc_cc_lib to wrap them them up into a usable library;", "Used for all gcc invocations. _EMBEDDED_FLAGS = [ \"-O0\", #", "and ${name}_${arch}_data filesets containing respectively all of the binaries and", "it may seem: genrule doesn't have hdrs or deps #", "dependencies of this library - no decorations assumed, used and", "% protobuf_include, \"--grpc-cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp", "\"_grpc_proto\" grpc_dep_set = dep_set | [name] | _SC_GRPC_DEPS grpc_gen_hdrs_plus =", "]] || cd $(GENDIR)\" % proto_src_loc) gendir_include = [\"-I$(GENDIR)\", \"-I.\"]", "\"\"\"Public macro to build multi-arch library from Message protobuf(s). 
For", "visibility = visibility, ) my_proto_rollup = decorate(name_arch, \"proto_rollup.flags\") protoc_srcs_set =", "_loc(gen_grpc_pb_h)), \"cp %s.grpc.pb.cc %s\" % (temp_stem, _loc(gen_grpc_pb_cc)), ] grpc_pb_outs =", "= visibility, ) register_extension_info( extension_name = \"sc_cc_lib\", label_regex_for_dep = \"{extension_name}\",", "label_regex_for_dep = \"{extension_name}\", ) def sc_cc_bin( name, deps = None,", "../../dir/to/deeply/nested/path/../../../other/path will become ../../dir/to/other/path Args: path: A valid absolute or", "alter a blaze rule based on the target platform architecture.", "hdrs = [shim_rule] + hdrs, deps = deps, arch =", "visibility, ) # Generate GRPC if grpc_shim_rule: gen_grpc_pb_h = gen_stem", "= linkopts, visibility = visibility, ) register_extension_info( extension_name = \"sc_cc_test\",", "# Copyright 2018-present Open Networking Foundation # SPDX-License-Identifier: Apache-2.0 \"\"\"A", "flags to avoid generating # warnings. accum_flags.append( \"$$(if [[ -e", "def _loc(target): \"\"\"Return target location for constructing commands. Args: target:", "in deps: if dep.endswith(\"_proto\"): protoc_deps.append(\"%s_%s_headers\" % (dep, arch)) name_arch =", "default = []), deps = sc_platform_select( host = deps or", "_gen_proto_shims(name, pb_modifier, srcs, arches, visibility): \"\"\"Macro to build .pb.h multi-arch", "name available to this build. Returns: $(location target) \"\"\" return", "List of architectures to generate this way. copts: Analogous to", "a suffix to a label, expanding implicit targets if needed.", "TODO(unknown): if useful again then inject from cmdline else kill", "flags, libs, etc. 
\"\"\" alwayslink = 0 deps = depset(deps", "srcs = None, arches = None, copts = None, defines", "= \"%s/%s\" % (native.package_name(), proto_include) if full_proto_include: temp_prefix = \"%s/%s\"", "return shim_rule def _gen_py_proto_lib(name, srcs, deps, visibility, testonly): \"\"\"Creates a", "(host or default), config_label_prefix + \"ppc\": (ppc or default), config_label_prefix", "inputs, ), ] + [ native.FilesetEntry(srcdir = decorate(dep, extension)) for", "# compile this proto. native.filegroup( name = decorate(name_arch, \"headers\"), srcs", "the given srcs. There's no clean way to make python", "else: temp_prefix = \"%s/%s\" % (cpp_out, native.package_name()) proto_rollups = [", "files hdrs: More files to build into this library, but", "proto_path_cmds.append(\"; \".join([ \"if [[ -e %s ]]\" % (\"%s/%s\" %", "arch - \"ppc\". EMBEDDED_X86 Name of \"x86\" arch. HOST_ARCH Name", "= testonly, ) native.py_proto_library( name = py_name, api_version = 2,", "default or \"//stratum/portage:dummy\", host = host, ppc = ppc, x86", "an sc_platform_select based on a textual list of arches. def", "depset(srcs + hdrs), deps = deps, visibility = visibility, testonly", "if not filename.endswith(\".proto\"): continue hdr_stem = filename[0:-6] new_hdr_name = hdr_stem", "for b in bins.to_list()]), (\"data\", data), ]: native.Fileset( name =", "name = py_name, api_version = 2, deps = [regular_proto_name], visibility", "default, arches): return sc_platform_select( host = value if \"host\" in", "# Used for C++ compiler invocations. 
_EMBEDDED_CXXFLAGS = [ \"-std=gnu++11\",", "return select({ \"//conditions:default\": (host or default), config_label_prefix + \"ppc\": (ppc", "textual_plus = textual_hdrs | depset(deps.to_list()) cc_library( name = name, deps", "\"-Os\", # Use this for program-sizing build \"-g\", # Don't", "pb_outs, tools = tools, cmd = \" && \".join(cmds), heuristic_label_expansion", "sc_platform_select( default = [\"STRATUM_ARCH_HOST\"], ppc = [\"STRATUM_ARCH_PPC\"], x86 = [\"STRATUM_ARCH_X86\"],", "$(@)\" % \"; \".join(proto_rollup_cmds) native.genrule( name = decorate(name_arch, \"proto_rollup\"), srcs", "_gen_proto_shims( name = name, pb_modifier = \".pb\", srcs = srcs", "embedded arches. EMBEDDED_PPC Name of PowerPC arch - \"ppc\". EMBEDDED_X86", "testonly, ) native.py_proto_library( name = py_name, api_version = 2, deps", "= False # Used for all gcc invocations. _EMBEDDED_FLAGS =", "as the include paths used for both sc_cc_lib and sc_proto_lib", "grpc_tools = [protoc_label, grpc_plugin] protoc = \"$${g3}/%s\" % _loc(protoc_label) grpc_plugin", "of files (and file producing rules) to be packaged. deps:", "[] grpc_gen_hdrs = [] grpc_gen_srcs = [] tools = [protoc_label]", "arches else default, ) def sc_platform_alias( name, host = None,", "% (temp_stem, _loc(gen_pb_cc)), ] pb_outs = [gen_pb_h, gen_pb_cc] native.genrule( name", "sc_packages to add to this package. arches: Which architectures to", "\"; \".join(proto_rollup_cmds) native.genrule( name = decorate(name_arch, \"proto_rollup\"), srcs = proto_rollups,", "shim_rule = _gen_proto_shims( name = name, pb_modifier = \".pb\", srcs", "or \"small\", srcs = sc_platform_select(host = srcs or [], default", "with those as input. * ${name}_py a py_proto_library version of", "deps: List of deps for this library arch: Which architecture", "shim rule for use in follow-on hdrs and/or src lists.", "above, but also exported for dependent rules to utilize. 
deps:", "this for program-sizing build ] # PPC ====================================================================== _PPC_GRTE =", "= None, strip_include_prefix = None, data = None, testonly =", "in (\"\", \".\"): if result: continue elif d == \"..\":", "a different ppc toolchain for Stratum. # This means that", "\".tar.gz\"), \"rm -rf $${TEMP_DIR}\", ] native.genrule( name = decorate(name, \"tarball\"),", "most contexts to alter a blaze rule based on the", "to be generated and addressed independently. This aspect of the", "hdrs: Analogous to cc_library hdrs argument. arches: List of architectures", "etc. \"\"\" alwayslink = 0 deps = depset(deps or [])", "Copyright 2018-present Open Networking Foundation # SPDX-License-Identifier: Apache-2.0 \"\"\"A portable", "load(\"@rules_proto//proto:defs.bzl\", \"proto_library\") load(\"@rules_cc//cc:defs.bzl\", \"cc_binary\", \"cc_library\", \"cc_test\") # Generic path &", "[ \" \".join([protoc] + gendir_include + proto_rollup_flags + [ \"-I%s\"", "make python protos work with sc_proto_lib's proto_include field, so we", "% name] # Copy our files into a temporary directory", "out = name, entries = [ native.FilesetEntry( srcdir = decorate(name,", "is more general than it may seem: genrule doesn't have", "line in lines] new_hdr_loc = \"$(location %s)\" % new_hdr_name cmds.append(\"{", "argument. copts: Analogous to cc_test copts argument. linkopts: Analogous to", "grpc_shim_rule: If needed, the name of the grpc shim for", "the target platform architecture. If no selection is provided for", "use the proto_include field in this rule. services: List of", "(native.package_name(), src) if proto_src_loc.startswith(full_proto_include + \"/\"): proto_src_loc = proto_src_loc[len(full_proto_include) +", "to build this library for, None => EMBEDDED_ARCHES (HOST_ARCHES not", "Generic path & label helpers. 
============================================ def _normpath(path): \"\"\"Normalize a", "protos work with sc_proto_lib's proto_include field, so we keep this", "argument. data: Analogous to cc_test data argument. defines: Analogous to", "srcs argument. deps: Standard blaze deps argument. visibility: Standard blaze", "our files into a temporary directory and make any necessary", "s + '\"' # Adds squotes around a string. def", "another forge server. proto_path_cmds = [\"rollup=$$(sed \\\"s,G3LOC,$${PWD},g\\\" %s)\" % _loc(my_proto_rollup)]", "data for deployment. For package \"name\", generates: * ${name}_${arch}_bin and", "than it may seem: genrule doesn't have hdrs or deps", "dep_set | [name] | _SC_GRPC_DEPS grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs", "= testonly, textual_hdrs = gen_hdrs, visibility = visibility, ) if", ") accum_flags.append( \"$$(if [[ -e %s ]]; then echo -IG3LOC/%s;", "= visibility, ) grpc_shim_rule = None if (service_enable[\"grpc\"]): grpc_shim_rule =", "to make python protos work with sc_proto_lib's proto_include field, so", "{ \"grpc\": 0, } for service in services or []:", "path-up segments and its corresponding directories. Providing own implementation because", "Returns: The requested selector. \"\"\" if default == None and", "flag in accum_flags] proto_rollup_cmds.append(\"cat $(SRCS)\") proto_rollup_cmd = \"{ %s; }", "[ \"TEMP_DIR=$(@D)/stratum_packaging_temp\", \"mkdir $${TEMP_DIR}\", \"cp -r %s $${TEMP_DIR}/tarball\" % _loc(fileset_name),", "Normalizes a path by removing unnecessary path-up segments and its", "specific files to the final tarball. platform_entries = sc_platform_select( #", "visibility, ) outs = [\"%s.tar.gz\" % name] # Copy our", "% (native.package_name(), proto_include) if full_proto_include: temp_prefix = \"%s/%s\" % (cpp_out,", "arches = ALL_ARCHES defs_plus = (defines or []) + _ARCH_DEFINES", "declaring Stratum internal visibility. 
The macros are like cc_library(), proto_library(),", "Generates a blaze alias that will select the appropriate target.", "no selection is provided for a given platform, {default} is", "pkg, filename = parse_label(src) if not filename.endswith(\".proto\"): continue hdr_stem =", "List of files (and file producing rules) to be packaged.", "rules - one for each arch. * sc_cc_lib(name) with those", "gen_grpc_pb_cc = gen_stem + \".grpc.pb.cc\" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds = bash_vars", "[] tools = [protoc_label] grpc_tools = [protoc_label, grpc_plugin] protoc =", "(host builds only). testonly: Standard blaze testonly parameter. textual_hdrs: Analogous", "-e %s ]]\" % (\"%s/%s\" % (full_proto_include, proto_src_loc)), \"then cd", "# Portability definitions =================================================== def sc_cc_test( name, size = None,", "this library. srcs: List of .proto files - private to", "depset(xdeps or []) copts = depset(copts or []) includes =", "deps: Analogous to cc_binary deps argument. srcs: Analogous to cc_binary", "\"$${g3}/%s\" % _loc(protoc_label) grpc_plugin = \"$${g3}/%s\" % _loc(grpc_plugin) cpp_out =", "# SPDX-License-Identifier: Apache-2.0 \"\"\"A portable build system for Stratum P4", "for any of {host,ppc,x86} that isn't specified. visibility: The visibility", "filesets containing respectively all of the binaries and all of", "a relative filename from a label, replacing \"//\" and \":\".", "given ${src}.proto, generate: :${src}_${arch}.pb rule to run protoc ${src}.proto =>", "The value to use for any of {host,ppc,x86} that isn't", "not arches: arches = EMBEDDED_ARCHES fileset_name = decorate(name, \"fs\") for", "a select value for at least one platform in \"", "[ \"-O0\", # Don't use this for program-sizing build #--", "\".\" python_support: Defaults to False. If True, generate a python", "be used in most contexts to alter a blaze rule", "definitions. 
============================================== EMBEDDED_PPC = \"ppc\" EMBEDDED_X86 = \"x86\" EMBEDDED_ARCHES =", "= [\"//stratum/portage:dummy_with_main\"], ), data = data or [], defines =", "internal visibility. The macros are like cc_library(), proto_library(), and cc_binary(),", "-1 continue else: level += 1 result.append(d) return sep.join(result) #", "% protobuf_label protobuf_srcs = [protobuf_hdrs] protobuf_include = \"$${g3}/protobuf/src\" if arch", "python_support: if proto_include: fail(\"Cannot use proto_include on an sc_proto_lib with", "filename.endswith(\".proto\"): continue hdr_stem = filename[0:-6] new_hdr_name = hdr_stem + hdr_ext", "sc_cc_lib( name = decorate(name, arch), deps = dep_set, srcs =", "grpc_shim_rule: gen_grpc_pb_h = gen_stem + \".grpc.pb.h\" gen_grpc_pb_cc = gen_stem +", "+ [ \" \".join([ protoc, \"--plugin=protoc-gen-grpc-cpp=%s\" % grpc_plugin, ] +", "grpc_gen_hdrs = [] grpc_gen_srcs = [] tools = [protoc_label] grpc_tools", "use this filegroup to accumulate the set of .proto files", "or []) + _ARCH_DEFINES textual_plus = textual_hdrs | depset(deps.to_list()) cc_library(", "Name of default \"host\" arch. HOST_ARCHES All host arches. STRATUM_INTERNAL", "or []) deps = depset(deps or []) if not arches:", "embedded arch is selected. During embedded builds this target will", "each src path.proto, generates path.pb.h consisting of: #ifdef logic to", "\"register_extension_info\", ) load(\"@rules_proto//proto:defs.bzl\", \"proto_library\") load(\"@rules_cc//cc:defs.bzl\", \"cc_binary\", \"cc_library\", \"cc_test\") # Generic", "strip_include_prefix, testonly = testonly, textual_hdrs = sc_platform_filter( textual_plus | xdeps,", "relative path to normalize. Returns: A path equivalent to the", "api_version = 2, deps = [regular_proto_name], visibility = visibility, testonly", "avoid generating # warnings. 
accum_flags.append( \"$$(if [[ -e $(GENDIR)/%s ]];", "= [\"$${rollup}\"] if proto_include: # We'll be cd-ing to another", "rules to be packaged. data: List of files (and file", "if \"x86\" in arches else default, ) def sc_platform_alias( name,", "service == \"grpc\": service_enable[\"grpc\"] = 1 elif service == \"rpc\":", "\"%s_%s\" % (label, suffix) elif label.startswith(\"//\"): # //foo/bar -> //foo/bar:bar_suffix", "= includes, testonly = testonly, textual_hdrs = gen_hdrs, visibility =", "else: arches = ALL_ARCHES service_enable = { \"grpc\": 0, }", "cmdline else kill feature. _TRACE_SRCS = False # Used for", "for our ppc # executables. ppc = [native.FilesetEntry( srcdir =", "cc_test rule that interacts safely with Stratum builds. Generates a", "Identify Stratum platform arch for .pb.h shims and other portability", "file, specifying the symbols needed. The public symbols are the", "[name] | _SC_GRPC_DEPS grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs sc_cc_lib( name", "or [], default = []), deps = sc_platform_select( host =", "(and file producing rules) to be packaged. deps: List of", "Allow C++11 features _and_ GNU extensions. ] # Used for", "[]) hdrs = depset(hdrs or []) xdeps = depset(xdeps or", "Don't use this for program-sizing build \"-Wall\", \"-Werror\", # Warn", "be packaged. deps: List of other sc_packages to add to", "\"{ %s; } | sort -u -o $(@)\" % \";", "arch), \"proto_rollup.flags\") for dep in deps if dep.endswith(\"_proto\") ] proto_rollup_cmds", "\"cp %s.grpc.pb.h %s\" % (temp_stem, _loc(gen_grpc_pb_h)), \"cp %s.grpc.pb.cc %s\" %", "level += -1 continue else: level += 1 result.append(d) return", "])) gendir_include = [\"-I$${g3}/$(GENDIR)\", \"-I$${g3}\", \"-I.\"] else: proto_src_loc = \"%s/%s\"", "new_hdr_name = hdr_stem + hdr_ext outs.append(new_hdr_name) # Generate lines for", "= parse_label(src) if not filename.endswith(\".proto\"): continue hdr_stem = filename[0:-6] new_hdr_name", "name: The name of the alias target. 
host: The result", "squote(s): return \"'\" + s + \"'\" # Emulate Python", "Args: name: Analogous to cc_test name argument. size: Analogous to", "textual_hdrs = None, visibility = None, xdeps = None): \"\"\"Creates", "will select the appropriate proto target based on the currently", "for .pb.h shims and other portability hacks. _ARCH_DEFINES = sc_platform_select(", "shim_rule, srcs = srcs, outs = outs, cmd = \"", "of augmenting context with a user defined configuration fragment would", "on a textual list of arches. def sc_platform_filter(value, default, arches):", "visibility parameter. \"\"\" deps = depset(deps or []) srcs =", "to cc_library. visibility: Standard blaze visibility parameter. xdeps: External (file)", "select({ \"//conditions:default\": (host or default), config_label_prefix + \"ppc\": (ppc or", "testonly: Standard blaze testonly parameter. visibility: Standard blaze visibility parameter.", "for this library. pb_modifier: protoc plugin-dependent file extension (e.g.: .pb)", "in deps.to_list() ], visibility = visibility, ) # Add any", "and arches. Args: name: Analogous to cc_binary name argument. deps:", "If True, generate a python proto library from this rule.", "dependency packages. * ${name}_${arch} fileset containing the corresponding bin and", "or [], default = [\"//stratum/portage:dummy_with_main\"], ), data = data or", "or []) hdrs = depset(hdrs or []) xdeps = depset(xdeps", "Args: name: Base name for this package. bins: List of", "confuse blaze. \"-fno-builtin-malloc\", # We'll use tcmalloc \"-fno-builtin-calloc\", \"-fno-builtin-realloc\", \"-fno-builtin-free\",", "subsequent rules. testonly: Standard blaze testonly parameter. proto_include: Path to", "0 deps = depset(deps or []) srcs = depset(srcs or", "expand inside squotes, so quote accordingly. include_fmt = \"#include \"", "= arch, visibility = visibility, testonly = testonly, proto_include =", "bin and data filesets, mapped to bin/ and share/ respectively.", "rule. 
services: List of services to enable {\"grpc\", \"rpc\"}; Only", "protoc, as well as the include paths used for both", "EMBEDDED_ARCHES: grpc_plugin = _SC_GRPC_PLUGIN else: grpc_plugin = _GRPC_PLUGIN protoc_deps =", "\".join([protoc] + gendir_include + proto_rollup_flags + [ \"-I%s\" % protobuf_include,", "doesn't break the build when an embedded arch is selected.", "macro to package binaries and data for deployment. For package", "sc_proto_libs that also have python support, and may not use", "protoc_deps + protobuf_srcs + [my_proto_rollup]) gen_srcs = [] gen_hdrs =", "for sc_proto_lib. For each src path.proto, generates path.pb.h consisting of:", "shared libs for our ppc # executables. ppc = [native.FilesetEntry(", "the corresponding bin and data filesets, mapped to bin/ and", "GENDIR! proto_path_cmds.append(\"; \".join([ \"if [[ -e %s ]]\" % (\"%s/%s\"", "\"$(location %s)\" % new_hdr_name cmds.append(\"{ %s; } > %s\" %", "\".h\" for src in srcs: pkg, filename = parse_label(src) if", "sc_cc_lib( name = decorate(grpc_name, arch), deps = grpc_dep_set, srcs =", "service == \"rpc\": service_enable[\"grpc\"] = 1 else: fail(\"service='%s' not in", "+ hdrs, arches = arches, visibility = visibility, ) for", "py_proto_library from the given srcs. There's no clean way to", "proto_include: full_proto_include = \"%s/%s\" % (native.package_name(), proto_include) if full_proto_include: temp_prefix", "grpc_shim_rule): \"\"\"Creates rules and filegroups for embedded protobuf library. For", "decorate(name, \"fs\") for extension, inputs in [ (\"bin\", [\"%s.stripped\" %", "minimal use of path-up segments. 
Invalid input paths will stay", "src in srcs + hdrs: if src.endswith(\".proto\"): src_stem = src[0:-6]", "this once per (listed) arch; # which then calls sc_cc_lib", "= None): \"\"\"Creates a cc_test rule that interacts safely with", "b in bins.to_list()]), (\"data\", data), ]: native.Fileset( name = decorate(fileset_name,", "defs_plus, includes = includes, linkopts = [\"-ldl\", \"-lutil\"], testonly =", "_gen_py_proto_lib( name = name, srcs = depset(srcs + hdrs), deps", "Symbols added as \"-D\" compilation options. includes: Paths to add", "restrictions. The key difference: you can supply lists of architectures", "data = depset(data or []) deps = depset(deps or [])", ":${src}_${arch}.grpc.pb rule to run protoc w/ erpc plugin: ${src}.proto =>", "default \"host\" arch. HOST_ARCHES All host arches. STRATUM_INTERNAL For declaring", "through to subsequent rules. testonly: Standard blaze testonly parameter. proto_include:", "$(location target) \"\"\" return \"$(location %s)\" % target def _gen_proto_lib(", "messages gen_pb_h = gen_stem + \".pb.h\" gen_pb_cc = gen_stem +", "default, ppc = value if \"ppc\" in arches else default,", "= [] full_proto_include = None if proto_include == \".\": full_proto_include", "erpc plugin: ${src}.proto => ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects include options for", "= [regular_proto_name], visibility = visibility, testonly = testonly, ) #", "\" && \".join(cmds), heuristic_label_expansion = 0, visibility = visibility, )", "$${TEMP_DIR}/tarball/bin ]]\", \"then for f in $${TEMP_DIR}/tarball/bin/*.stripped\", \" do mv", "in $${TEMP_DIR}/tarball/bin/*.stripped\", \" do mv $${f} $${f%.stripped}\", # rename not", "load(\"//tools/build_defs/label:def.bzl\", \"parse_label\") load( \"//devtools/build_cleaner/skylark:build_defs.bzl\", \"register_extension_info\", ) load(\"@rules_proto//proto:defs.bzl\", \"proto_library\") load(\"@rules_cc//cc:defs.bzl\", \"cc_binary\",", "before tarballing. 
cmds = [ \"TEMP_DIR=$(@D)/stratum_packaging_temp\", \"mkdir $${TEMP_DIR}\", \"cp -r", "= \"@com_google_protobuf//:protobuf\" _SC_GRPC_PLUGIN = \"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\" _GRPC_PLUGIN = \"//grpc:grpc_cpp_plugin\" def _loc(target):", "private to this library. hdrs: As above, but also exported", "${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto. Args: name: Base name for this", "# TODO(unknown): Figure out how we can use $(CC_FLAGS) instead", "\"host\", \"#else\", \"#error Unknown STRATUM_ARCH\", \"#endif\", ] gen_cmds = [(\"printf", "= testonly, ) # TODO(unknown): Add support for depending on", "\" do mv $${f} $${f%.stripped}\", # rename not available. \"done\",", "our rollup command # might be generated on another forge", "portable library and arches. Args: name: Analogous to cc_library name", "extension, inputs in [ (\"bin\", [\"%s.stripped\" % b for b", ") # Protobuf ================================================================= _SC_GRPC_DEPS = [ \"//sandblaze/prebuilt/grpc\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_base\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\",", "cc_binary srcs argument. arches: List of architectures to generate this", "be cd-ing to another directory before protoc, so # adjust", "Python 2.5+ str(startswith([prefix ...]) def starts_with(s, prefix_list): for prefix in", "+ '\"' # Adds squotes around a string. def squote(s):", "will affect the symbols generated by protoc, as well as", "library arch: Which architecture to build this library for. visibility:", "None => EMBEDDED_ARCHES (HOST_ARCHES not generally supported). visibility: Standard blaze", "path. Normalizes a path by removing unnecessary path-up segments and", "False # Used for all gcc invocations. _EMBEDDED_FLAGS = [", "default target is used instead. Args: name: The name of", "data: Files to provide as data at runtime (host builds", "calls are OK as long as the arches are disjoint.", "packages. 
* ${name}_${arch} fileset containing the corresponding bin and data", "\"ppc\" in arches else default, x86 = value if \"x86\"", "to add to this package. arches: Which architectures to build", "There's no clean way to make python protos work with", "Open Networking Foundation # SPDX-License-Identifier: Apache-2.0 \"\"\"A portable build system", "(full_proto_include, proto_src_loc)), \"then cd %s\" % full_proto_include, \"else cd $(GENDIR)/%s\"", "if label.startswith(\"//\"): # //foo/bar:bat/baz -> google3_foo/bar/bat/baz return label.replace(\"//\", \"google3/\").replace(\":\", \"/\")", "C and C++ compiler invocations. _EMBEDDED_CFLAGS = [ \"-I$(GENDIR)\", ]", "%s\" % (temp_stem, _loc(gen_pb_h)), \"cp %s.pb.cc %s\" % (temp_stem, _loc(gen_pb_cc)),", "no default is set, a dummy default target is used", "bash_vars + [ \"mkdir -p %s\" % temp_prefix, ] +", "data: Analogous to cc_test data argument. defines: Analogous to cc_test", "builds. default: The value to use for any of {host,ppc,x86}", "proto_rollups = [ decorate(decorate(dep, arch), \"proto_rollup.flags\") for dep in deps", "decorate(dep, extension)) for dep in deps.to_list() ], visibility = visibility,", "] gen_cmds = [(\"printf '%%s\\\\n' '%s'\" % line) for line", "with these include flags to avoid generating # warnings. accum_flags.append(", "aka .pb.h master switch - see _gen_proto_shims, above. * ${name}_${arch}_pb", "_ARCH_DEFINES = sc_platform_select( default = [\"STRATUM_ARCH_HOST\"], ppc = [\"STRATUM_ARCH_PPC\"], x86", "= [] grpc_gen_hdrs = [] grpc_gen_srcs = [] tools =", "% (temp_stem, _loc(gen_grpc_pb_h)), \"cp %s.grpc.pb.cc %s\" % (temp_stem, _loc(gen_grpc_pb_cc)), ]", "= visibility, testonly = testonly, ) # TODO(unknown): Add support", "[]) xdeps = depset(xdeps or []) copts = depset(copts or", "rules. def sc_proto_lib( name = None, srcs = [], hdrs", "protoc, so # adjust our .proto path accordingly. 
proto_src_loc =", "= decorate(name, \"tarball\"), srcs = [\":%s\" % fileset_name], outs =", "library from Message protobuf(s). For library \"name\", generates: * ${name}_shim", "accordingly. include_fmt = \"#include \" + dquote(pkg + \"/\" +", "+ gen_hdrs sc_cc_lib( name = decorate(grpc_name, arch), deps = grpc_dep_set,", "toolchain for Stratum. # This means that we must provide", "deps if dep.endswith(\"_proto\") ] proto_rollup_cmds = [\"printf '%%s\\n' %s\" %", "this library for, None => ALL. visibility: Standard blaze visibility", "defs_plus, includes = sc_platform_filter(includes, [], arches), include_prefix = include_prefix, strip_include_prefix", "server. proto_path_cmds = [\"rollup=$$(sed \\\"s,G3LOC,$${PWD},g\\\" %s)\" % _loc(my_proto_rollup)] proto_rollup_flags =", "each arch; # multiple such calls are OK as long", "architectures: ppc x86 \"\"\" load(\"//tools/build_defs/label:def.bzl\", \"parse_label\") load( \"//devtools/build_cleaner/skylark:build_defs.bzl\", \"register_extension_info\", )", "feature. _TRACE_SRCS = False # Used for all gcc invocations.", "== \".\": full_proto_include = native.package_name() elif proto_include: full_proto_include = \"%s/%s\"", "(label, suffix) # Creates a relative filename from a label,", "size or \"small\", srcs = sc_platform_select(host = srcs or [],", "% (temp_prefix, src_stem) gen_stem = \"%s.%s\" % (src_stem, arch) #", "\"done\", \"fi\", \"tar czf %s -h -C $${TEMP_DIR}/tarball .\" %", "= _GRPC_PLUGIN protoc_deps = [] for dep in deps: if", "path to normalize. Returns: A path equivalent to the input", "+ proto_path_cmds + [ \" \".join([ protoc, \"--plugin=protoc-gen-grpc-cpp=%s\" % grpc_plugin,", "plugin-dependent file extension (e.g.: .pb) srcs: List of proto files.", "We use this filegroup to accumulate the set of .proto", "* ${name}_py, a py_proto_library based on ${name}_default_pb. 
Args: name: Standard", "alwayslink = 0 deps = depset(deps or []) srcs =", "name = name, srcs = depset(srcs + hdrs), deps =", "\"-fno-builtin-free\", \"-D__STDC_FORMAT_MACROS=1\", # TODO(unknown): Figure out how we can use", "= data or [], defines = defines, copts = copts,", "\"x86\" EMBEDDED_ARCHES = [ EMBEDDED_PPC, EMBEDDED_X86, ] HOST_ARCH = \"host\"", "(native.package_name(), proto_include) if full_proto_include: temp_prefix = \"%s/%s\" % (cpp_out, native.package_name()[len(full_proto_include):])", "\".\"): if result: continue elif d == \"..\": if level", "on normal proto_library rules. def sc_proto_lib( name = None, srcs", "all embedded dependencies appear as a `src'. # TODO(unknown): if", "needed to # compile this proto. native.filegroup( name = decorate(name_arch,", "sc_platform_filter( textual_plus | xdeps, [], arches, ), data = sc_platform_filter(data,", "= grpc_dep_set, srcs = grpc_gen_srcs, hdrs = hdrs + grpc_gen_hdrs_plus", "\"parse_label\") load( \"//devtools/build_cleaner/skylark:build_defs.bzl\", \"register_extension_info\", ) load(\"@rules_proto//proto:defs.bzl\", \"proto_library\") load(\"@rules_cc//cc:defs.bzl\", \"cc_binary\", \"cc_library\",", "= \"$${g3}/$(GENDIR)/%s/%s\" % (native.package_name(), arch) accum_flags = [] full_proto_include =", "\".grpc.pb.cc\" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds = bash_vars + [ \"mkdir -p", "add to include path. This will affect the symbols generated", "library and arches. Args: name: Analogous to cc_library name argument.", "so # adjust our .proto path accordingly. proto_src_loc = \"%s/%s\"", "in arches else None, visibility = visibility, ) return shim_rule", "= None, python_support = False, services = []): \"\"\"Public macro", "argument. defines: Symbols added as \"-D\" compilation options. includes: Paths", "and confuse blaze. 
\"-fno-builtin-malloc\", # We'll use tcmalloc \"-fno-builtin-calloc\", \"-fno-builtin-realloc\",", "]), \"cd $${g3}\", \"cp %s.grpc.pb.h %s\" % (temp_stem, _loc(gen_grpc_pb_h)), \"cp", "= [], visibility = None, testonly = None, proto_include =", "we must provide portable shared libs for our ppc #", "this rule. Typically \".\" python_support: Defaults to False. If True,", "== None): fail(\"Missing a select value for at least one", "================================================================= _SC_GRPC_DEPS = [ \"//sandblaze/prebuilt/grpc\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_base\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\", ] _PROTOC =", "[\"STRATUM_ARCH_HOST\"], ppc = [\"STRATUM_ARCH_PPC\"], x86 = [\"STRATUM_ARCH_X86\"], ) STRATUM_INTERNAL =", "-r %s $${TEMP_DIR}/tarball\" % _loc(fileset_name), \"if [[ -e $${TEMP_DIR}/tarball/bin ]]\",", "= (srcs + hdrs + protoc_deps + protobuf_srcs + [my_proto_rollup])", "\"\"\"Public macro to create an alias that changes based on", "} > %s\" % (\" && \".join(gen_cmds), new_hdr_loc)) shim_rule =", "srcs. There's no clean way to make python protos work", "portable tarball package. and the variables/lists: ALL_ARCHES All known arches.", "proto_rollup_cmds = [\"printf '%%s\\n' %s\" % flag for flag in", "includes, testonly = testonly, textual_hdrs = gen_hdrs, visibility = visibility,", "of shim rule for use in follow-on hdrs and/or src", "\"cc_test\") # Generic path & label helpers. ============================================ def _normpath(path):", "switch file. # Lines expand inside squotes, so quote accordingly.", "+ [ \" \".join([protoc] + gendir_include + proto_rollup_flags + [", "removing unnecessary path-up segments and its corresponding directories. Providing own", "linkopts = [\"-ldl\", \"-lutil\"], testonly = testonly, visibility = visibility,", "architectures to build this library for, None => ALL. visibility:", "macro to build multi-arch library from Message protobuf(s). 
For library", "[\"-I$${g3}/$(GENDIR)\", \"-I$${g3}\", \"-I.\"] else: proto_src_loc = \"%s/%s\" % (native.package_name(), src)", ") # Add any platform specific files to the final", "defs_plus = (defines or []) + _ARCH_DEFINES textual_plus = textual_hdrs", "decorate(name_arch, \"headers\"), srcs = hdrs + protoc_deps, visibility = visibility,", "%s\" % (full_proto_include, proto_src_loc)) # By cd-ing to another directory,", "[ \"-I%s\" % protobuf_include, \"--grpc-cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd", "or default), config_label_prefix + \"x86\": (x86 or default), }) #", "None, x86 = decorate(name, \"x86\") if \"x86\" in arches else", "not arches: arches = ALL_ARCHES defs_plus = (defines or [])", "depset(deps or []) srcs = depset(srcs or []) hdrs =", "library \"name\", generates: * ${name}_shim aka .pb.h master switch -", ") register_extension_info( extension_name = \"sc_proto_lib\", label_regex_for_dep = \"{extension_name}\", ) def", "mangle paths and confuse blaze. \"-fno-builtin-malloc\", # We'll use tcmalloc", "sc_platform_filter(deps, [], arches), srcs = sc_platform_filter(srcs, [], arches), hdrs =", "to cc_binary copts argument. defines: Symbols added as \"-D\" compilation", "# adjust our .proto path accordingly. proto_src_loc = \"%s/%s\" %", "proto lib. \"\"\" bash_vars = [\"g3=$${PWD}\"] # TODO(unknown): Switch protobuf", "None, defines = None, includes = None, include_prefix = None,", "mv $${f} $${f%.stripped}\", # rename not available. \"done\", \"fi\", \"tar", "deps = deps, arch = arch, visibility = visibility, testonly", "build \"-Wall\", \"-Werror\", # Warn lots, and force fixing warnings.", "options for all embedded architectures # # Set _TRACE_SRCS to", "visibility: The visibility of the alias target. \"\"\" native.alias( name", "result of the alias for host builds. 
ppc: The result", "arch) accum_flags = [] full_proto_include = None if proto_include ==", "native.FilesetEntry( srcdir = decorate(name, \"data\"), destdir = \"share\", ), ]", "default == None and (host == None or ppc ==", "% new_hdr_name cmds.append(\"{ %s; } > %s\" % (\" &&", "decorate(path) sc_cc_lib Declare a portable Library. sc_proto_lib Declare a portable", "deps = depset(deps or []) srcs = depset(srcs or [])", "of the alias for any of {host,ppc,x86} that isn't specified.", "Returns: Name of shim rule for use in follow-on hdrs", "= \"sc_cc_bin\", label_regex_for_dep = \"{extension_name}\", ) # Protobuf ================================================================= _SC_GRPC_DEPS", "= depset(textual_hdrs or []) if srcs: if [s for s", "src_stem = src[0:-6] src_arch = \"%s_%s\" % (src_stem, arch) temp_stem", "visibility parameter, passed through to all filesets. \"\"\" bins =", "src_arch = \"%s_%s\" % (src_stem, arch) temp_stem = \"%s/%s\" %", "accum_flags] proto_rollup_cmds.append(\"cat $(SRCS)\") proto_rollup_cmd = \"{ %s; } | sort", "& label helpers. ============================================ def _normpath(path): \"\"\"Normalize a path. Normalizes", "if src.endswith(\".proto\"): src_stem = src[0:-6] src_arch = \"%s_%s\" % (src_stem,", "Stratum. # This means that we must provide portable shared", "visibility = visibility, testonly = testonly, ) register_extension_info( extension_name =", "every target platform. Args: host: The value to use for", "x86: The result of the alias for x86 builds. default:", "for which they should be compiled - defaults to all", "arches), hdrs = sc_platform_filter(hdrs, [], arches), alwayslink = alwayslink, copts", "the build when an embedded arch is selected. During embedded", "hdrs), deps = deps, visibility = visibility, testonly = testonly,", "set, a dummy default target is used instead. Args: name:", "options. includes: Paths to add as \"-I\" compilation options. include_prefix:", "specified. Returns: The requested selector. 
\"\"\" if default == None", "services or []: if service == \"grpc\": service_enable[\"grpc\"] = 1", "to a label, expanding implicit targets if needed. def decorate(label,", "options and some restrictions. The key difference: you can supply", "= srcs or [], default = []), deps = sc_platform_select(", ".proto path accordingly. proto_src_loc = \"%s/%s\" % (native.package_name(), src) if", "] # PPC ====================================================================== _PPC_GRTE = \"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\" # X86 ======================================================================", "\"\"\"Creates a py_proto_library from the given srcs. There's no clean", "visibility argument. testonly: Standard blaze testonly argument. \"\"\" regular_proto_name =", "proto_rollup_cmd = \"{ %s; } | sort -u -o $(@)\"", "x86 = decorate(name, \"x86\") if \"x86\" in arches else None,", "embedded protobuf library. For every given ${src}.proto, generate: :${src}_${arch}.pb rule", "accumulate the set of .proto files needed to # compile", "path accordingly. proto_src_loc = \"%s/%s\" % (native.package_name(), src) if proto_src_loc.startswith(full_proto_include", "native.py_proto_library( name = py_name, api_version = 2, deps = [regular_proto_name],", "and exported as header, not for flags, libs, etc. \"\"\"", "system is suboptimal - something along the lines of augmenting", "0: result.pop() level += -1 continue else: level += 1", "compile steps. # This is more general than it may", "=================================================== def sc_cc_test( name, size = None, srcs = None,", "+ dquote(pkg + \"/\" + hdr_stem + \".%s\" + hdr_ext)", "of other sc_packages to add to this package. arches: Which", "This will affect the symbols generated by protoc, as well", "\"cd $${g3}\", \"cp %s.pb.h %s\" % (temp_stem, _loc(gen_pb_h)), \"cp %s.pb.cc", "use $(CC_FLAGS) instead of this. 
\"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\", ] # Used for", "protoc_label = _PROTOC protobuf_label = _PROTOBUF protobuf_hdrs = \"%s:well_known_types_srcs\" %", "which they should be compiled - defaults to all if", "blaze alias that will select the appropriate target. If no", "to select path.${arch}.pb.h Also generates an alias that will select", "destdir = \"share\", ), ] + platform_entries, visibility = visibility,", "strip_include_prefix = None, data = None, testonly = None, textual_hdrs", "native.genrule( name = shim_rule, srcs = srcs, outs = outs,", "so we keep this simple. For library \"name\", generates: *", "[]) shim_rule = _gen_proto_shims( name = name, pb_modifier = \".pb\",", "seem: genrule doesn't have hdrs or deps # attributes, so", "with sc_proto_lib's proto_include field, so we keep this simple. For", "for src in srcs + hdrs: if src.endswith(\".proto\"): src_stem =", "we force protoc to produce # different symbols. Careful, our", "= None, deps = None, arches = None, visibility =", "prefix in prefix_list: if s.startswith(prefix): return prefix return None def", "path by removing unnecessary path-up segments and its corresponding directories.", "then echo -IG3LOC/$(GENDIR)/%s; fi)\" % (full_proto_include, full_proto_include), ) accum_flags.append( \"$$(if", "visibility = visibility, ) register_extension_info( extension_name = \"sc_cc_bin\", label_regex_for_dep =", "build this library for, None => EMBEDDED_ARCHES (HOST_ARCHES not generally", "srcs = sc_platform_filter(srcs, [], arches), hdrs = sc_platform_filter(hdrs, [], arches),", "\"%s/%s\" % (native.package_name(), proto_include) if full_proto_include: temp_prefix = \"%s/%s\" %", "else: print(\"Invalid proto include '%s' doesn't match src %s\" %", "= [\":libs\"], destdir = \"lib/stratum\", symlinks = \"dereference\", )], default", "- defaults to all if left unstated. 
Internally, libraries and", "None, defines = None, copts = None, linkopts = None,", "= sc_platform_filter(data, [], arches), visibility = visibility, ) register_extension_info( extension_name", "visibility, testonly = testonly, ) for src in srcs +", "clean way to make python protos work with sc_proto_lib's proto_include", "]]\", \"then for f in $${TEMP_DIR}/tarball/bin/*.stripped\", \" do mv $${f}", "$${TEMP_DIR}/tarball .\" % _loc(name + \".tar.gz\"), \"rm -rf $${TEMP_DIR}\", ]", "None): \"\"\"Public macro to create an alias that changes based", "of deps for this library arches: Which architectures to build", "or []) copts = depset(copts or []) includes = depset(includes", "\"$$(if [[ -e $(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s; fi)\" %", "# By cd-ing to another directory, we force protoc to", "another directory, we force protoc to produce # different symbols.", "bins.to_list()]), (\"data\", data), ]: native.Fileset( name = decorate(fileset_name, extension), out", "defines = defines, copts = copts, linkopts = linkopts, visibility", "depset(includes or []) data = depset(data or []) textual_hdrs =", "Base name for this library. srcs: List of .proto files", "$(GENDIR)\" % proto_src_loc) gendir_include = [\"-I$(GENDIR)\", \"-I.\"] # Generate messages", "extra work with these include flags to avoid generating #", "temp_prefix, ] + proto_path_cmds + [ \" \".join([ protoc, \"--plugin=protoc-gen-grpc-cpp=%s\"", "\"/\"): proto_src_loc = proto_src_loc[len(full_proto_include) + 1:] else: print(\"Invalid proto include", "program-sizing build \"-g\", # Don't use this for program-sizing build", "supported). visibility: Standard blaze visibility parameter, passed through to all", "way. copts: Analogous to cc_library copts argument. defines: Symbols added", "added as \"-D\" compilation options. includes: Paths to add as", "], visibility = visibility, ) # Add any platform specific", "_and_ GNU extensions. ] # Used for linking binaries. 
_EMBEDDED_LDFLAGS", "= ALL_ARCHES defs_plus = (defines or []) + _ARCH_DEFINES textual_plus", "add as \"-I\" compilation options. testonly: Standard blaze testonly parameter.", "# Copy our files into a temporary directory and make", "+ \".%s\" + hdr_ext) lines = [ \"#if defined(STRATUM_ARCH_%s)\" %", "[HOST_ARCH] ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES # Identify Stratum platform", "cc_test srcs argument. deps: Analogous to cc_test deps argument. data:", "Returns: A path equivalent to the input path with minimal", "ppc = None, x86 = None, default = None): \"\"\"Public", "directory and make any necessary changes # before tarballing. cmds", "native.package_name()[len(full_proto_include):]) # We do a bit of extra work with", "affect the symbols generated by protoc, as well as the", "[], default = [\"//stratum/portage:dummy_with_main\"], ), data = data or [],", "return sc_platform_select( host = value if \"host\" in arches else", "protoc w/ erpc plugin: ${src}.proto => ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects include", "visibility, ) register_extension_info( extension_name = \"sc_cc_lib\", label_regex_for_dep = \"{extension_name}\", )", "s.endswith(\".h\")]: alwayslink = 1 if not arches: arches = ALL_ARCHES", "\".pb.cc\" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds = bash_vars + [ \"mkdir -p", "python_support == True. Args: name: Base name for this library.", "the given portable binary and arches. Args: name: Analogous to", "copts argument. linkopts: Analogous to cc_test linkopts argument. visibility: Analogous", ") sc_platform_alias( name = name, host = decorate(name, \"host\") if", "blaze visibility parameter, passed through to subsequent rules. 
testonly: Standard", "% temp_prefix, ] + proto_path_cmds + [ \" \".join([ protoc,", "= gen_stem + \".grpc.pb.h\" gen_grpc_pb_cc = gen_stem + \".grpc.pb.cc\" grpc_gen_hdrs.append(gen_grpc_pb_h)", "sc_platform_alias( name = name, host = decorate(name, \"host\") if \"host\"", "_PPC_GRTE, files = [\":libs\"], destdir = \"lib/stratum\", symlinks = \"dereference\",", "\"-I$${g3}\", \"-I.\"] else: proto_src_loc = \"%s/%s\" % (native.package_name(), src) proto_path_cmds.append(\"[[", "% line) for line in lines] new_hdr_loc = \"$(location %s)\"", "if testonly: arches = HOST_ARCHES else: arches = ALL_ARCHES service_enable", "arches = ALL_ARCHES service_enable = { \"grpc\": 0, } for", "def sc_package( name = None, bins = None, data =", "files needed to # compile this proto. native.filegroup( name =", "= depset(deps or []) srcs = depset(srcs or []) hdrs", "visibility): \"\"\"Macro to build .pb.h multi-arch master switch for sc_proto_lib.", "platform in \" + \"sc_platform_select. Please add.\") config_label_prefix = \"//stratum:stratum_\"", "the macros: decorate(path) sc_cc_lib Declare a portable Library. sc_proto_lib Declare", "python_support: Defaults to False. If True, generate a python proto", "grpc_pb_outs, tools = grpc_tools, cmd = \" && \".join(cmds), heuristic_label_expansion", "hdrs = None, arches = None, copts = None, defines", "\"-I\" compilation options. include_prefix: Analogous to cc_library include_prefix argument. strip_include_prefix:", "srcs = sc_platform_filter(srcs, [], arches), copts = copts, defines =", "defs. For example ../../dir/to/deeply/nested/path/../../../other/path will become ../../dir/to/other/path Args: path: A", "for dependent rules to utilize. deps: List of deps for", "this package. bins: List of sc_cc_bin rules to be packaged.", "[[ -e %s ]]\" % (\"%s/%s\" % (full_proto_include, proto_src_loc)), \"then", "containing the corresponding bin and data filesets, mapped to bin/", "fixing warnings. 
\"-no-canonical-prefixes\", # Don't mangle paths and confuse blaze.", "EMBEDDED_PPC = \"ppc\" EMBEDDED_X86 = \"x86\" EMBEDDED_ARCHES = [ EMBEDDED_PPC,", "====================================================================== _PPC_GRTE = \"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\" # X86 ====================================================================== _X86_GRTE = \"//grte/v4_x86/release/usr/grte/v4\"", "if d in (\"\", \".\"): if result: continue elif d", "X86 ====================================================================== _X86_GRTE = \"//grte/v4_x86/release/usr/grte/v4\" # Portability definitions =================================================== def", "builds. default: The result of the alias for any of", "python support, and may not use the proto_include field in", "visibility: Standard blaze visibility parameter, passed through to all filesets.", "relative filename from a label, replacing \"//\" and \":\". def", "the alias for any of {host,ppc,x86} that isn't specified. visibility:", "name: Analogous to cc_binary name argument. deps: Analogous to cc_binary", "Args: name: Base name for this library. pb_modifier: protoc plugin-dependent", "quote accordingly. include_fmt = \"#include \" + dquote(pkg + \"/\"", "= proto_include, grpc_shim_rule = grpc_shim_rule, ) if python_support: if proto_include:", "x86, ), visibility = visibility, ) # Embedded build definitions.", "-> bat/baz return label[1:] else: # bat/baz -> bat/baz return", "gen_srcs = [] gen_hdrs = [] grpc_gen_hdrs = [] grpc_gen_srcs", "\"{extension_name}\", ) def sc_cc_bin( name, deps = None, srcs =", "proto_src_loc, ]), \"cd $${g3}\", \"cp %s.grpc.pb.h %s\" % (temp_stem, _loc(gen_grpc_pb_h)),", "for program-sizing build \"-g\", # Don't use this for program-sizing", "Analogous to cc_binary srcs argument. arches: List of architectures to", "(full_proto_include, proto_src_loc)) # By cd-ing to another directory, we force", "argument. 
visibility: Analogous to cc_test visibility argument. \"\"\" cc_test( name", "binaries. _EMBEDDED_LDFLAGS = [ # \"-static\", # Use this for", "]]; then echo -IG3LOC/$(GENDIR)/%s; fi)\" % (full_proto_include, full_proto_include), ) accum_flags.append(", "out = decorate(name, extension), entries = [ native.FilesetEntry( files =", "name = shim_rule, srcs = srcs, outs = outs, cmd", "helpers. ============================================ def _normpath(path): \"\"\"Normalize a path. Normalizes a path", "size: Analogous to cc_test size argument. srcs: Analogous to cc_test", "multi-arch master switch for sc_proto_lib. For each src path.proto, generates", "_make_filename(label): if label.startswith(\"//\"): # //foo/bar:bat/baz -> google3_foo/bar/bat/baz return label.replace(\"//\", \"google3/\").replace(\":\",", "platform arch for .pb.h shims and other portability hacks. _ARCH_DEFINES", "\"\"\" bash_vars = [\"g3=$${PWD}\"] # TODO(unknown): Switch protobuf to using", "All known arches. EMBEDDED_ARCHES All embedded arches. EMBEDDED_PPC Name of", "result.append(d) return sep.join(result) # Adds a suffix to a label,", "= decorate(name_arch, \"proto_rollup.flags\") protoc_srcs_set = (srcs + hdrs + protoc_deps", "for shim switch file. # Lines expand inside squotes, so", "library. pb_modifier: protoc plugin-dependent file extension (e.g.: .pb) srcs: List", "% (full_proto_include, proto_src_loc)), \"then cd %s\" % full_proto_include, \"else cd", "the set of .proto files needed to # compile this", "+ proto_rollup_flags + [ \"-I%s\" % protobuf_include, \"--cpp_out=%s\" % cpp_out,", "or []) srcs = depset(srcs or []) hdrs = depset(hdrs", "protoc_deps = [] for dep in deps: if dep.endswith(\"_proto\"): protoc_deps.append(\"%s_%s_headers\"", "blaze visibility parameter. \"\"\" deps = depset(deps or []) srcs", "\"sc_platform_select. 
Please add.\") config_label_prefix = \"//stratum:stratum_\" return select({ \"//conditions:default\": (host", "specific value or default must be provided for every target", "then echo -IG3LOC/%s; fi)\" % (full_proto_include, full_proto_include), ) else: temp_prefix", "), visibility = visibility, ) # Embedded build definitions. ==============================================", "symbols are the macros: decorate(path) sc_cc_lib Declare a portable Library.", "[ \"//stratum:__subpackages__\", ] # # Build options for all embedded", "program-sizing build ] # PPC ====================================================================== _PPC_GRTE = \"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\" #", "1 else: fail(\"service='%s' not in (grpc, rpc)\" % service) deps", ") # Embedded build definitions. ============================================== EMBEDDED_PPC = \"ppc\" EMBEDDED_X86", "sc_platform_select based on a textual list of arches. def sc_platform_filter(value,", "cd %s\" % full_proto_include, \"else cd $(GENDIR)/%s\" % full_proto_include, \"fi\",", "= alwayslink, copts = sc_platform_filter(copts, [], arches), defines = defs_plus,", "_GRPC_PLUGIN protoc_deps = [] for dep in deps: if dep.endswith(\"_proto\"):", "[ \"mkdir -p %s\" % temp_prefix, ] + proto_path_cmds +", "None, default = None, visibility = None): \"\"\"Public macro to", "Please add.\") config_label_prefix = \"//stratum:stratum_\" return select({ \"//conditions:default\": (host or", "exported for dependent rules to utilize. 
deps: List of deps", "= None, srcs = None, arches = None, copts =", "doesn't have hdrs or deps # attributes, so all embedded", "The macros are like cc_library(), proto_library(), and cc_binary(), but with", "sc_cc_lib( name, deps = None, srcs = None, hdrs =", "srcs = None, hdrs = None, arches = None, copts", "textual_hdrs = sc_platform_filter( textual_plus | xdeps, [], arches, ), data", "cmds = [ \"TEMP_DIR=$(@D)/stratum_packaging_temp\", \"mkdir $${TEMP_DIR}\", \"cp -r %s $${TEMP_DIR}/tarball\"", "decorate(name, \"bin\"), destdir = \"bin\", ), native.FilesetEntry( srcdir = decorate(name,", "ALL_ARCHES defs_plus = (defines or []) + _ARCH_DEFINES cc_binary( name", "[my_proto_rollup], cmd = proto_rollup_cmd, visibility = visibility, testonly = testonly,", "_PROTOC = \"@com_google_protobuf//:protobuf:protoc\" _PROTOBUF = \"@com_google_protobuf//:protobuf\" _SC_GRPC_PLUGIN = \"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\" _GRPC_PLUGIN", "testonly, visibility = visibility, ) register_extension_info( extension_name = \"sc_cc_bin\", label_regex_for_dep", "to enable {\"grpc\", \"rpc\"}; Only \"grpc\" is supported. So \"rpc\"", ":bat/baz -> bat/baz return label[1:] else: # bat/baz -> bat/baz", "[] hdr_ext = pb_modifier + \".h\" for src in srcs:", "library. Only generated if python_support == True. Args: name: Base", "hdrs and/or src lists. \"\"\" outs = [] cmds =", "List of sc_cc_bin rules to be packaged. data: List of", "[protobuf_hdrs] protobuf_include = \"$${g3}/protobuf/src\" if arch in EMBEDDED_ARCHES: grpc_plugin =", "symbols. Careful, our proto might be in GENDIR! proto_path_cmds.append(\"; \".join([", "\\\"s,G3LOC,$${PWD},g\\\" %s)\" % _loc(my_proto_rollup)] proto_rollup_flags = [\"$${rollup}\"] if proto_include: #", "deps, arch = arch, visibility = visibility, testonly = testonly,", "_GRPC_PLUGIN = \"//grpc:grpc_cpp_plugin\" def _loc(target): \"\"\"Return target location for constructing", "hacks. 
_ARCH_DEFINES = sc_platform_select( default = [\"STRATUM_ARCH_HOST\"], ppc = [\"STRATUM_ARCH_PPC\"],", "= _gen_proto_shims( name = decorate(name[:-6], \"grpc_proto\"), pb_modifier = \".grpc.pb\", srcs", "register_extension_info( extension_name = \"sc_cc_bin\", label_regex_for_dep = \"{extension_name}\", ) # Protobuf", "for d in path.split(sep): if d in (\"\", \".\"): if", "python support.\") _gen_py_proto_lib( name = name, srcs = depset(srcs +", "]]\" % (\"%s/%s\" % (full_proto_include, proto_src_loc)), \"then cd %s\" %", "alias for ppc builds. x86: The result of the alias", "may seem: genrule doesn't have hdrs or deps # attributes,", "not use the proto_include field in this rule. services: List", "= [ native.FilesetEntry( files = inputs, ), ] + [", ") register_extension_info( extension_name = \"sc_cc_test\", label_regex_for_dep = \"{extension_name}\", ) def", "sc_platform_filter(srcs, [], arches), hdrs = sc_platform_filter(hdrs, [], arches), alwayslink =", "% fileset_name], outs = outs, cmd = \"; \".join(cmds), visibility", "= None, srcs = [], hdrs = [], deps =", "This is more general than it may seem: genrule doesn't", "Analogous to cc_test defines argument. copts: Analogous to cc_test copts", "filename[0:-6] new_hdr_name = hdr_stem + hdr_ext outs.append(new_hdr_name) # Generate lines", "Warn lots, and force fixing warnings. \"-no-canonical-prefixes\", # Don't mangle", "= deps or [], default = [\"//stratum/portage:dummy_with_main\"], ), data =", "outs.append(new_hdr_name) # Generate lines for shim switch file. # Lines", "\"-static\", # Use this for program-sizing build # \"-Wl,--gc-sections,--no-wchar-size-warning\", #", "\"cc_binary\", \"cc_library\", \"cc_test\") # Generic path & label helpers. ============================================", "= []): \"\"\"Public macro to build multi-arch library from Message", "utilize. 
deps: List of deps for this library arches: Which", "rpc)\" % service) deps = depset(deps or []) shim_rule =", "Copy our files into a temporary directory and make any", "\"--plugin=protoc-gen-grpc-cpp=%s\" % grpc_plugin, ] + gendir_include + proto_rollup_flags + [", "${src}.proto => ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule to run protoc w/ erpc", "to utilize. deps: List of deps for this library arch:", "name = name, host = decorate(name, \"host\") if \"host\" in", "_loc(protoc_label) grpc_plugin = \"$${g3}/%s\" % _loc(grpc_plugin) cpp_out = \"$${g3}/$(GENDIR)/%s/%s\" %", "arch)) name_arch = decorate(name, arch) # We use this filegroup", "if not arches: arches = EMBEDDED_ARCHES fileset_name = decorate(name, \"fs\")", "srcs: Analogous to cc_test srcs argument. deps: Analogous to cc_test", "defs_plus = (defines or []) + _ARCH_DEFINES cc_binary( name =", "= [ \"-O0\", # Don't use this for program-sizing build", "based on the currently selected platform architecture. Args: name: Base", "= \"lib/stratum\", symlinks = \"dereference\", )], default = [], )", "for protoc: ${src}_${arch}_proto_rollup.flags Feed each set into sc_cc_lib to wrap", "% \"host\", \"#else\", \"#error Unknown STRATUM_ARCH\", \"#endif\", ] gen_cmds =", "(defines or []) + _ARCH_DEFINES cc_binary( name = name, deps", "a bit of extra work with these include flags to", "hdr_stem + hdr_ext outs.append(new_hdr_name) # Generate lines for shim switch", "argument. strip_include_prefix: Analogous to cc_library strip_include_prefix argument. data: Files to", "value for at least one platform in \" + \"sc_platform_select.", "how we can use $(CC_FLAGS) instead of this. \"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\", ]", "arches = None, copts = None, defines = None, includes", "isn't specified. Returns: The requested selector. 
\"\"\" if default ==", "= fileset_name, out = name, entries = [ native.FilesetEntry( srcdir", "= None): \"\"\"Public macro to create an alias that changes", "lists of architectures for which they should be compiled -", "s + \"'\" # Emulate Python 2.5+ str(startswith([prefix ...]) def", "target def _gen_proto_lib( name, srcs, hdrs, deps, arch, visibility, testonly,", "cc_binary name argument. deps: Analogous to cc_binary deps argument. srcs:", "EMBEDDED_PPC Name of PowerPC arch - \"ppc\". EMBEDDED_X86 Name of", "cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp %s.grpc.pb.h %s\" % (temp_stem,", "for all embedded architectures # # Set _TRACE_SRCS to show", "= \"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\" _GRPC_PLUGIN = \"//grpc:grpc_cpp_plugin\" def _loc(target): \"\"\"Return target location", "to cc_library include_prefix argument. strip_include_prefix: Analogous to cc_library strip_include_prefix argument.", "= 1 else: fail(\"service='%s' not in (grpc, rpc)\" % service)", "&& \".join(cmds), heuristic_label_expansion = 0, visibility = visibility, ) dep_set", "own implementation because import os is not allowed in build", "= None, testonly = None, textual_hdrs = None, visibility =", "visibility = None): \"\"\"Creates a cc_test rule that interacts safely", "+ \".grpc.pb.h\" gen_grpc_pb_cc = gen_stem + \".grpc.pb.cc\" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds", "= None, textual_hdrs = None, visibility = None, xdeps =", "suffix): if label.endswith(\":\"): # .../bar: -> .../bar label = label[:-1]", "cc_test visibility argument. \"\"\" cc_test( name = name, size =", "grpc_plugin] protoc = \"$${g3}/%s\" % _loc(protoc_label) grpc_plugin = \"$${g3}/%s\" %", "use for host builds. 
ppc: The value to use for", "= decorate(name, arch), deps = dep_set, srcs = gen_srcs, hdrs", "= depset(srcs + hdrs), deps = deps, visibility = visibility,", "visibility, testonly = testonly, ) register_extension_info( extension_name = \"sc_proto_lib\", label_regex_for_dep", "] _PROTOC = \"@com_google_protobuf//:protobuf:protoc\" _PROTOBUF = \"@com_google_protobuf//:protobuf\" _SC_GRPC_PLUGIN = \"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\"", "\"\"\"Return target location for constructing commands. Args: target: Blaze target", "target name available to this build. Returns: $(location target) \"\"\"", "\"name\", generates: * ${name}_default_pb, a regular proto library. * ${name}_py,", "for dep in deps: if dep.endswith(\"_proto\"): protoc_deps.append(\"%s_%s_headers\" % (dep, arch))", "default must be provided for every target platform. Args: host:", "{host,ppc,x86} that isn't specified. Returns: The requested selector. \"\"\" if", "host = None, ppc = None, x86 = None, default", "and no default is set, a dummy default target is", "Public sc_proto_lib invokes this once per (listed) arch; # which", "= name, deps = sc_platform_filter(deps, [], arches), srcs = sc_platform_filter(srcs,", "rule. Any sc_proto_lib with python support may only depend on", "Generate lines for shim switch file. # Lines expand inside", "platform_entries = sc_platform_select( # We use a different ppc toolchain", "The value to use for x86 builds. default: The value", "ppc x86 \"\"\" load(\"//tools/build_defs/label:def.bzl\", \"parse_label\") load( \"//devtools/build_cleaner/skylark:build_defs.bzl\", \"register_extension_info\", ) load(\"@rules_proto//proto:defs.bzl\",", "-> .../bar:bat_suffix return \"%s_%s\" % (label, suffix) elif label.startswith(\"//\"): #", "s.startswith(prefix): return prefix return None def sc_platform_select(host = None, ppc", "might be generated on another forge server. proto_path_cmds = [\"rollup=$$(sed", "alias that changes based on target arch. 
Generates a blaze", "-> .../bar label = label[:-1] if \":\" in label: #", "defines argument. copts: Analogous to cc_test copts argument. linkopts: Analogous", "library \"name\", generates: * ${name}_default_pb, a regular proto library. *", "some restrictions. The key difference: you can supply lists of", "copts = depset(copts or []) includes = depset(includes or [])", "lines] new_hdr_loc = \"$(location %s)\" % new_hdr_name cmds.append(\"{ %s; }", "\"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\", ] _PROTOC = \"@com_google_protobuf//:protobuf:protoc\" _PROTOBUF = \"@com_google_protobuf//:protobuf\" _SC_GRPC_PLUGIN =", "for, None => EMBEDDED_ARCHES (HOST_ARCHES not generally supported). visibility: Standard", "proto files. arches: List of arches this shim should support.", "=> ALL. visibility: Standard blaze visibility parameter, passed through to", "proto_library( name = regular_proto_name, srcs = srcs, deps = [decorate(dep,", "gen_grpc_pb_cc] native.genrule( name = src_arch + \".grpc.pb\", srcs = protoc_srcs_set,", "\"#elif defined(STRATUM_ARCH_%s)\" % \"HOST\", include_fmt % \"host\", \"#else\", \"#error Unknown", "executables. ppc = [native.FilesetEntry( srcdir = \"%s:BUILD\" % _PPC_GRTE, files", "HOST_ARCHES else: arches = ALL_ARCHES service_enable = { \"grpc\": 0,", "to cc_binary name argument. deps: Analogous to cc_binary deps argument.", "expanding implicit targets if needed. def decorate(label, suffix): if label.endswith(\":\"):", "\"-Wall\", \"-Werror\", # Warn lots, and force fixing warnings. \"-no-canonical-prefixes\",", "% (cpp_out, native.package_name()[len(full_proto_include):]) # We do a bit of extra", "grpc_dep_set = dep_set | [name] | _SC_GRPC_DEPS grpc_gen_hdrs_plus = grpc_gen_hdrs", "in build defs. For example ../../dir/to/deeply/nested/path/../../../other/path will become ../../dir/to/other/path Args:", "requested selector. 
\"\"\" if default == None and (host ==", "\"-I.\"] else: proto_src_loc = \"%s/%s\" % (native.package_name(), src) proto_path_cmds.append(\"[[ -e", ")], default = [], ) native.Fileset( name = fileset_name, out", "%s ]]; then echo -IG3LOC/%s; fi)\" % (full_proto_include, full_proto_include), )", "ppc: The value to use for ppc builds. x86: The", "as a `src'. # TODO(unknown): if useful again then inject", "textual list of arches. def sc_platform_filter(value, default, arches): return sc_platform_select(", "arch: Which architecture to build this library for. visibility: Standard", "protoc to produce # different symbols. Careful, our proto might", "copts = None, defines = None, includes = None, include_prefix", "\"#else\", \"#error Unknown STRATUM_ARCH\", \"#endif\", ] gen_cmds = [(\"printf '%%s\\\\n'", "and addressed independently. This aspect of the system is suboptimal", "= depset(deps or []) srcs = depset(srcs or []) if", "= testonly, visibility = visibility, ) register_extension_info( extension_name = \"sc_cc_bin\",", "= [protoc_label, grpc_plugin] protoc = \"$${g3}/%s\" % _loc(protoc_label) grpc_plugin =", "testonly = testonly, textual_hdrs = grpc_gen_hdrs_plus, visibility = visibility, )", "ALL. visibility: Standard blaze visibility parameter, passed through to subsequent", "deps # attributes, so all embedded dependencies appear as a", "this way. copts: Analogous to cc_binary copts argument. defines: Symbols", "but with different options and some restrictions. The key difference:", "\"cp %s.grpc.pb.cc %s\" % (temp_stem, _loc(gen_grpc_pb_cc)), ] grpc_pb_outs = [gen_grpc_pb_h,", "or deps # attributes, so all embedded dependencies appear as", "= \".pb\", srcs = srcs + hdrs, arches = arches,", "only depend on sc_proto_libs that also have python support, and", "= srcs, outs = outs, cmd = \" && \".join(cmds)", "targets if needed. 
def decorate(label, suffix): if label.endswith(\":\"): # .../bar:", "sc_proto_lib with python support may only depend on sc_proto_libs that", "gen_pb_cc = gen_stem + \".pb.cc\" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds = bash_vars", "None, srcs = [], hdrs = [], deps = [],", "them up into a usable library; note that ${src}_${arch}_erpc_proto depends", "= None, default = None): \"\"\"Public macro to alter blaze", "# Creates a relative filename from a label, replacing \"//\"", "${src}_${arch}_proto_rollup.flags Feed each set into sc_cc_lib to wrap them them", "includes = includes, linkopts = [\"-ldl\", \"-lutil\"], testonly = testonly,", "if \"ppc\" in arches else default, x86 = value if", "arch; # which then calls sc_cc_lib with same name for", "\"py\") proto_library( name = regular_proto_name, srcs = srcs, deps =", "= name, actual = sc_platform_select( default = default or \"//stratum/portage:dummy\",", "None): \"\"\"Creates rules for the given portable binary and arches.", "\"share\", ), ] + platform_entries, visibility = visibility, ) outs", "\".%s\" + hdr_ext) lines = [ \"#if defined(STRATUM_ARCH_%s)\" % \"PPC\",", "cc_binary( name = name, deps = sc_platform_filter( deps, [\"//stratum/portage:dummy_with_main\"], arches,", "[] for dep in deps: if dep.endswith(\"_proto\"): protoc_deps.append(\"%s_%s_headers\" % (dep,", "file producing rules) to be packaged. deps: List of other", "] # Used for linking binaries. _EMBEDDED_LDFLAGS = [ #", "proto_library rules. def sc_proto_lib( name = None, srcs = [],", "definitions =================================================== def sc_cc_test( name, size = None, srcs =", "% (native.package_name(), arch) accum_flags = [] full_proto_include = None if", "name = name, pb_modifier = \".pb\", srcs = srcs +", "name = src_arch + \".pb\", srcs = protoc_srcs_set, outs =", "ppc: The result of the alias for ppc builds. 
x86:", "(temp_stem, _loc(gen_grpc_pb_h)), \"cp %s.grpc.pb.cc %s\" % (temp_stem, _loc(gen_grpc_pb_cc)), ] grpc_pb_outs", "fail(\"Missing a select value for at least one platform in", "also exported for dependent rules to utilize. deps: List of", "HOST_ARCHES = [HOST_ARCH] ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES # Identify", "arch is selected. During embedded builds this target will generate", "]]; then echo -IG3LOC/%s; fi)\" % (full_proto_include, full_proto_include), ) else:", "# different symbols. Careful, our proto might be in GENDIR!", "= visibility, ) if grpc_shim_rule: grpc_name = name[:-6] + \"_grpc_proto\"", "pb_modifier, srcs, arches, visibility): \"\"\"Macro to build .pb.h multi-arch master", "* ${name}_default_pb, a regular proto library. * ${name}_py, a py_proto_library", "as well as the include paths used for both sc_cc_lib", "prefix_list: if s.startswith(prefix): return prefix return None def sc_platform_select(host =", "use for x86 builds. default: The value to use for", "force fixing warnings. \"-no-canonical-prefixes\", # Don't mangle paths and confuse", "= visibility, ) register_extension_info( extension_name = \"sc_cc_test\", label_regex_for_dep = \"{extension_name}\",", "[], arches), visibility = visibility, ) register_extension_info( extension_name = \"sc_cc_lib\",", "None, includes = None, testonly = None, visibility = None):", "} | sort -u -o $(@)\" % \"; \".join(proto_rollup_cmds) native.genrule(", "= hdrs + gen_hdrs, arches = [arch], copts = [],", "= decorate(fileset_name, extension), out = decorate(name, extension), entries = [", "= [ \"-std=gnu++11\", # Allow C++11 features _and_ GNU extensions.", "# Warn lots, and force fixing warnings. \"-no-canonical-prefixes\", # Don't", "defines: Symbols added as \"-D\" compilation options. includes: Paths to", "(defines or []) + _ARCH_DEFINES textual_plus = textual_hdrs | depset(deps.to_list())", "rule to run protoc w/ erpc plugin: ${src}.proto => ${src}.${arch}.grpc.pb.{h,cc}", "in GENDIR! 
proto_path_cmds.append(\"; \".join([ \"if [[ -e %s ]]\" %", "srcs, deps, visibility, testonly): \"\"\"Creates a py_proto_library from the given", "native.genrule( name = decorate(name_arch, \"proto_rollup\"), srcs = proto_rollups, outs =", "a usable library; note that ${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto. Args:", "label.startswith(\":\"): # :bat/baz -> bat/baz return label[1:] else: # bat/baz", "Base name for this library. srcs: List of proto files", "left unstated. Internally, libraries and binaries are generated for every", "value or default must be provided for every target platform.", "the proto_include mechanism protoc_label = _PROTOC protobuf_label = _PROTOBUF protobuf_hdrs", "# Don't use this for program-sizing build \"-Wall\", \"-Werror\", #", "testonly, textual_hdrs = grpc_gen_hdrs_plus, visibility = visibility, ) def _gen_proto_shims(name,", "compilation options. include_prefix: Analogous to cc_library include_prefix argument. strip_include_prefix: Analogous", "=> ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects include options for protoc: ${src}_${arch}_proto_rollup.flags Feed", "visibility, ) register_extension_info( extension_name = \"sc_cc_bin\", label_regex_for_dep = \"{extension_name}\", )", "if arch in EMBEDDED_ARCHES: grpc_plugin = _SC_GRPC_PLUGIN else: grpc_plugin =", "return \"%s_%s\" % (label, suffix) elif label.startswith(\"//\"): # //foo/bar ->", "depset(srcs or []) if not arches: arches = ALL_ARCHES defs_plus", "this rule. Any sc_proto_lib with python support may only depend", "name = decorate(name_arch, \"proto_rollup\"), srcs = proto_rollups, outs = [my_proto_rollup],", "Stratum internal visibility. The macros are like cc_library(), proto_library(), and", "a portable Binary. sc_package Declare a portable tarball package. and", "if level > 0: result.pop() level += -1 continue else:", "None, visibility = None): \"\"\"Creates a cc_test rule that interacts", "cc_library hdrs argument. 
arches: List of architectures to generate this", "result of the alias for x86 builds. default: The result", "for program-sizing build ] # PPC ====================================================================== _PPC_GRTE = \"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\"", "ALL_ARCHES defs_plus = (defines or []) + _ARCH_DEFINES textual_plus =", ") def sc_cc_bin( name, deps = None, srcs = None,", "% (\"%s/%s\" % (full_proto_include, proto_src_loc)), \"then cd %s\" % full_proto_include,", "\".grpc.pb\", srcs = srcs + hdrs, arches = arches, visibility", "default is set, a dummy default target is used instead.", "grpc_tools, cmd = \" && \".join(cmds), heuristic_label_expansion = 0, visibility", "src path.proto, generates path.pb.h consisting of: #ifdef logic to select", "of .proto files - private to this library. hdrs: As", "= value if \"host\" in arches else default, ppc =", "this way. copts: Analogous to cc_library copts argument. defines: Symbols", "srcs = depset(srcs + hdrs), deps = deps, visibility =", "The visibility of the alias target. \"\"\" native.alias( name =", "proto files hdrs: More files to build into this library,", "= \"x86\" EMBEDDED_ARCHES = [ EMBEDDED_PPC, EMBEDDED_X86, ] HOST_ARCH =", "alias target. host: The result of the alias for host", "is suboptimal - something along the lines of augmenting context", "use of path-up segments. Invalid input paths will stay invalid.", "if label.endswith(\":\"): # .../bar: -> .../bar label = label[:-1] if", "builds. x86: The result of the alias for x86 builds.", "generate a dummy binary and will not attempt to build", "arches: Which architectures to build this library for, None =>", "srcs: Analogous to cc_library srcs argument. 
hdrs: Analogous to cc_library", "gen_hdrs = [] grpc_gen_hdrs = [] grpc_gen_srcs = [] tools", "# //foo/bar:bat/baz -> google3_foo/bar/bat/baz return label.replace(\"//\", \"google3/\").replace(\":\", \"/\") elif label.startswith(\":\"):", "sc_package( name = None, bins = None, data = None,", "alias for any of {host,ppc,x86} that isn't specified. visibility: The", "# Don't use this for program-sizing build #-- \"-Os\", #", "+ hdrs), deps = deps, visibility = visibility, testonly =", "name = name, actual = sc_platform_select( default = default or", "build this library for. visibility: Standard blaze visibility parameter, passed", "blaze visibility argument. testonly: Standard blaze testonly argument. \"\"\" regular_proto_name", "== \"grpc\": service_enable[\"grpc\"] = 1 elif service == \"rpc\": service_enable[\"grpc\"]", "\"x86\", \"#elif defined(STRATUM_ARCH_%s)\" % \"HOST\", include_fmt % \"host\", \"#else\", \"#error", "for this library arches: Which architectures to build this library", "Protobuf ================================================================= _SC_GRPC_DEPS = [ \"//sandblaze/prebuilt/grpc\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_base\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\", ] _PROTOC", "Switch protobuf to using the proto_include mechanism protoc_label = _PROTOC", ") load(\"@rules_proto//proto:defs.bzl\", \"proto_library\") load(\"@rules_cc//cc:defs.bzl\", \"cc_binary\", \"cc_library\", \"cc_test\") # Generic path", "for a given platform and no default is set, a", "testonly = testonly, proto_include = proto_include, grpc_shim_rule = grpc_shim_rule, )", "include_prefix: Analogous to cc_library include_prefix argument. strip_include_prefix: Analogous to cc_library", "dep.endswith(\"_proto\") ] proto_rollup_cmds = [\"printf '%%s\\n' %s\" % flag for", ":${src}_${arch}_proto_rollup collects include options for protoc: ${src}_${arch}_proto_rollup.flags Feed each set", "textual_hdrs: Analogous to cc_library. 
visibility: Standard blaze visibility parameter. xdeps:", "= sc_platform_filter(srcs, [], arches), hdrs = sc_platform_filter(hdrs, [], arches), alwayslink", "str(startswith([prefix ...]) def starts_with(s, prefix_list): for prefix in prefix_list: if", "is supported. So \"rpc\" and \"grpc\" are equivalent. \"\"\" if", "all of the data needed for this package and all", "defined(STRATUM_ARCH_%s)\" % \"HOST\", include_fmt % \"host\", \"#else\", \"#error Unknown STRATUM_ARCH\",", "\" + \"sc_platform_select. Please add.\") config_label_prefix = \"//stratum:stratum_\" return select({", "arches, visibility = visibility, ) for arch in arches: _gen_proto_lib(", "% \"ppc\", \"#elif defined(STRATUM_ARCH_%s)\" % \"X86\", include_fmt % \"x86\", \"#elif", "= None, data = None, defines = None, copts =", "_loc(grpc_plugin) cpp_out = \"$${g3}/$(GENDIR)/%s/%s\" % (native.package_name(), arch) accum_flags = []", "[(\"printf '%%s\\\\n' '%s'\" % line) for line in lines] new_hdr_loc", "The result of the alias for host builds. ppc: The", "= [gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule( name = src_arch + \".grpc.pb\", srcs", "\"x86\" arch. HOST_ARCH Name of default \"host\" arch. HOST_ARCHES All", "\"-O0\", # Don't use this for program-sizing build #-- \"-Os\",", "strip_include_prefix = strip_include_prefix, testonly = testonly, textual_hdrs = sc_platform_filter( textual_plus", "every given ${src}.proto, generate: :${src}_${arch}.pb rule to run protoc ${src}.proto", "macro to alter blaze rules based on the platform architecture.", "extensions. ] # Used for linking binaries. _EMBEDDED_LDFLAGS = [", "symbols generated by protoc, as well as the include paths", "arches. def sc_platform_filter(value, default, arches): return sc_platform_select( host = value", "ALL_ARCHES All known arches. EMBEDDED_ARCHES All embedded arches. EMBEDDED_PPC Name", "header, not for flags, libs, etc. 
\"\"\" alwayslink = 0", "]: native.Fileset( name = decorate(fileset_name, extension), out = decorate(name, extension),", "= visibility, ) # Add any platform specific files to", "sc_proto_lib( name = None, srcs = [], hdrs = [],", "size argument. srcs: Analogous to cc_test srcs argument. deps: Analogous", "Used for linking binaries. _EMBEDDED_LDFLAGS = [ # \"-static\", #", "from a label, replacing \"//\" and \":\". def _make_filename(label): if", "%s.grpc.pb.cc %s\" % (temp_stem, _loc(gen_grpc_pb_cc)), ] grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc]", "arches else default, ppc = value if \"ppc\" in arches", "blaze deps argument. visibility: Standard blaze visibility argument. testonly: Standard", "to another directory, we force protoc to produce # different", "invocations. _EMBEDDED_CFLAGS = [ \"-I$(GENDIR)\", ] # Used for C++", "a temporary directory and make any necessary changes # before", "\"\"\" alwayslink = 0 deps = depset(deps or []) srcs", "because our rollup command # might be generated on another", "for this package and all dependency packages. * ${name}_${arch} fileset", "as \"-I\" compilation options. testonly: Standard blaze testonly parameter. visibility:", "\"headers\"), srcs = hdrs + protoc_deps, visibility = visibility, )", "(src_stem, arch) temp_stem = \"%s/%s\" % (temp_prefix, src_stem) gen_stem =", "label_regex_for_dep = \"{extension_name}\", ) def sc_package( name = None, bins", "[\"//stratum/portage:dummy_with_main\"], ), data = data or [], defines = defines,", "\"if [[ -e %s ]]\" % (\"%s/%s\" % (full_proto_include, proto_src_loc)),", "else: fail(\"service='%s' not in (grpc, rpc)\" % service) deps =", "in arches else default, x86 = value if \"x86\" in", "return \"%s:%s_%s\" % (label, label.split(\"/\")[-1], suffix) else: # bar ->", "cc_library copts argument. 
defines: Symbols added as \"-D\" compilation options.", "srcs = srcs + hdrs, arches = arches, visibility =", "default = default or \"//stratum/portage:dummy\", host = host, ppc =", "\"-fno-builtin-calloc\", \"-fno-builtin-realloc\", \"-fno-builtin-free\", \"-D__STDC_FORMAT_MACROS=1\", # TODO(unknown): Figure out how we", "decorate(name, \"x86\") if \"x86\" in arches else None, visibility =", "proto_include: Path to add to include path. This will affect", "# before tarballing. cmds = [ \"TEMP_DIR=$(@D)/stratum_packaging_temp\", \"mkdir $${TEMP_DIR}\", \"cp", "cmds = bash_vars + [ \"mkdir -p %s\" % temp_prefix,", "size = None, srcs = None, deps = None, data", "\".join(cmds) or \"true\", ) sc_platform_alias( name = name, host =", "decorate(name, \"host\") if \"host\" in arches else None, ppc =", "based on target arch. Generates a blaze alias that will", "for both sc_cc_lib and sc_proto_lib rules that depend on this", "dep in deps: if dep.endswith(\"_proto\"): protoc_deps.append(\"%s_%s_headers\" % (dep, arch)) name_arch", "bar -> bar_suffix return \"%s_%s\" % (label, suffix) # Creates", "safely with Stratum builds. Generates a cc_test rule that doesn't", "C++ compiler invocations. _EMBEDDED_CFLAGS = [ \"-I$(GENDIR)\", ] # Used", "label.split(\"/\")[-1], suffix) else: # bar -> bar_suffix return \"%s_%s\" %", "= \"share\", ), ] + platform_entries, visibility = visibility, )", "all of the binaries and all of the data needed", "ppc builds. x86: The result of the alias for x86", "visibility = visibility, ) dep_set = depset(deps) | [protobuf_label] includes", "[decorate(dep, \"default_pb\") for dep in deps], visibility = visibility, testonly", "gendir_include = [\"-I$(GENDIR)\", \"-I.\"] # Generate messages gen_pb_h = gen_stem", "not allowed in build defs. 
For example ../../dir/to/deeply/nested/path/../../../other/path will become", "xdeps = None): \"\"\"Creates rules for the given portable library", "visibility = visibility, ) if grpc_shim_rule: grpc_name = name[:-6] +", "target will generate a dummy binary and will not attempt", "\"//devtools/build_cleaner/skylark:build_defs.bzl\", \"register_extension_info\", ) load(\"@rules_proto//proto:defs.bzl\", \"proto_library\") load(\"@rules_cc//cc:defs.bzl\", \"cc_binary\", \"cc_library\", \"cc_test\") #", "\"host\" HOST_ARCHES = [HOST_ARCH] ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES #", "level > 0: result.pop() level += -1 continue else: level", "Feed each set into sc_cc_lib to wrap them them up", "grpc_shim_rule = _gen_proto_shims( name = decorate(name[:-6], \"grpc_proto\"), pb_modifier = \".grpc.pb\",", "corresponding directories. Providing own implementation because import os is not", "ppc # executables. ppc = [native.FilesetEntry( srcdir = \"%s:BUILD\" %", "decorate(name, \"data\"), destdir = \"share\", ), ] + platform_entries, visibility", "host builds. ppc: The value to use for ppc builds.", "arches), visibility = visibility, ) register_extension_info( extension_name = \"sc_cc_lib\", label_regex_for_dep", "= proto_rollups, outs = [my_proto_rollup], cmd = proto_rollup_cmd, visibility =", "cd-ing to another directory, we force protoc to produce #", "# Set _TRACE_SRCS to show sources in embedded sc_cc_lib compile", "HOST_ARCHES All host arches. STRATUM_INTERNAL For declaring Stratum internal visibility.", "host: The result of the alias for host builds. ppc:", "= depset(srcs or []) if not arches: arches = ALL_ARCHES", "and the variables/lists: ALL_ARCHES All known arches. EMBEDDED_ARCHES All embedded", "and \":\". def _make_filename(label): if label.startswith(\"//\"): # //foo/bar:bat/baz -> google3_foo/bar/bat/baz", "in follow-on hdrs and/or src lists. \"\"\" outs = []", ".proto files needed to # compile this proto. 
native.filegroup( name", "[ \"-std=gnu++11\", # Allow C++11 features _and_ GNU extensions. ]", "None, srcs = None, hdrs = None, arches = None,", "[\"STRATUM_ARCH_PPC\"], x86 = [\"STRATUM_ARCH_X86\"], ) STRATUM_INTERNAL = [ \"//stratum:__subpackages__\", ]", "compiler invocations. _EMBEDDED_CXXFLAGS = [ \"-std=gnu++11\", # Allow C++11 features", "directory, we force protoc to produce # different symbols. Careful,", "tools = [protoc_label] grpc_tools = [protoc_label, grpc_plugin] protoc = \"$${g3}/%s\"", "\".join([ protoc, \"--plugin=protoc-gen-grpc-cpp=%s\" % grpc_plugin, ] + gendir_include + proto_rollup_flags", "to cc_library strip_include_prefix argument. data: Files to provide as data", "= [ \"TEMP_DIR=$(@D)/stratum_packaging_temp\", \"mkdir $${TEMP_DIR}\", \"cp -r %s $${TEMP_DIR}/tarball\" %", "| [protobuf_label] includes = [] if proto_include: includes = [proto_include]", "_ARCH_DEFINES cc_binary( name = name, deps = sc_platform_filter( deps, [\"//stratum/portage:dummy_with_main\"],", "= None, deps = None, data = None, defines =", "\"default_pb\") for dep in deps], visibility = visibility, testonly =", "arches = arches, visibility = visibility, ) for arch in", "HOST_ARCH Name of default \"host\" arch. HOST_ARCHES All host arches.", "PPC ====================================================================== _PPC_GRTE = \"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\" # X86 ====================================================================== _X86_GRTE =", "\"\"\"Public macro to alter blaze rules based on the platform", "be provided for every target platform. Args: host: The value", "build when an embedded arch is selected. During embedded builds", "-> bar_suffix return \"%s_%s\" % (label, suffix) # Creates a", "sc_cc_lib compile steps. # This is more general than it", "\"else cd $(GENDIR)/%s\" % full_proto_include, \"fi\", ])) gendir_include = [\"-I$${g3}/$(GENDIR)\",", "protobuf(s). 
For library \"name\", generates: * ${name}_shim aka .pb.h master", "[\"STRATUM_ARCH_X86\"], ) STRATUM_INTERNAL = [ \"//stratum:__subpackages__\", ] # # Build", ".../bar label = label[:-1] if \":\" in label: # .../bar:bat", "to build any dependencies. Args: name: Analogous to cc_test name", "full_proto_include), ) else: temp_prefix = \"%s/%s\" % (cpp_out, native.package_name()) proto_rollups", "temp_prefix = \"%s/%s\" % (cpp_out, native.package_name()) proto_rollups = [ decorate(decorate(dep,", "depset(textual_hdrs or []) if srcs: if [s for s in", "then inject from cmdline else kill feature. _TRACE_SRCS = False", "testonly: Standard blaze testonly parameter. proto_include: Include path for generated", "Standard blaze visibility parameter. \"\"\" deps = depset(deps or [])", "cd-ing to another directory before protoc, so # adjust our", "Analogous to cc_test name argument. size: Analogous to cc_test size", "dquote(s): return '\"' + s + '\"' # Adds squotes", "BUILD file, specifying the symbols needed. The public symbols are", "on the target platform architecture. If no selection is provided", "\"\"\" if default == None and (host == None or", "deps = sc_platform_filter(deps, [], arches), srcs = sc_platform_filter(srcs, [], arches),", "srcs: Analogous to cc_binary srcs argument. 
arches: List of architectures", "\"//sandblaze/prebuilt/grpc\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_base\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\", ] _PROTOC = \"@com_google_protobuf//:protobuf:protoc\" _PROTOBUF = \"@com_google_protobuf//:protobuf\"", "into sc_cc_lib to wrap them them up into a usable", "$${g3}\", \"cp %s.pb.h %s\" % (temp_stem, _loc(gen_pb_h)), \"cp %s.pb.cc %s\"", "or []) data = depset(data or []) deps = depset(deps", "None, include_prefix = None, strip_include_prefix = None, data = None,", "% (src_stem, arch) temp_stem = \"%s/%s\" % (temp_prefix, src_stem) gen_stem", "] pb_outs = [gen_pb_h, gen_pb_cc] native.genrule( name = src_arch +", "work with these include flags to avoid generating # warnings.", "srcs, outs = outs, cmd = \" && \".join(cmds) or", "\"%s:%s_%s\" % (label, label.split(\"/\")[-1], suffix) else: # bar -> bar_suffix", "depset(hdrs or []) xdeps = depset(xdeps or []) copts =", "produce # different symbols. Careful, our proto might be in", "for each arch; # multiple such calls are OK as", "unnecessary path-up segments and its corresponding directories. Providing own implementation", "return prefix return None def sc_platform_select(host = None, ppc =", "include options for protoc: ${src}_${arch}_proto_rollup.flags Feed each set into sc_cc_lib", "label, replacing \"//\" and \":\". def _make_filename(label): if label.startswith(\"//\"): #", "arch) # We can't use $${PWD} until this step, because", "= includes, testonly = testonly, textual_hdrs = grpc_gen_hdrs_plus, visibility =", "# This means that we must provide portable shared libs", "target arch. Generates a blaze alias that will select the", "grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds = bash_vars + [ \"mkdir -p %s\"", "on the platform architecture. Generates a blaze select(...) statement that", "follow-on hdrs and/or src lists. 
\"\"\" outs = [] cmds", "[ EMBEDDED_PPC, EMBEDDED_X86, ] HOST_ARCH = \"host\" HOST_ARCHES = [HOST_ARCH]", "_loc(my_proto_rollup)] proto_rollup_flags = [\"$${rollup}\"] if proto_include: # We'll be cd-ing", "+ \".h\" for src in srcs: pkg, filename = parse_label(src)", "Standard blaze testonly parameter. visibility: Standard blaze visibility parameter. \"\"\"", "Use this for program-sizing build ] # PPC ====================================================================== _PPC_GRTE", "= _SC_GRPC_PLUGIN else: grpc_plugin = _GRPC_PLUGIN protoc_deps = [] for", "\"\"\"A portable build system for Stratum P4 switch stack. To", ") if grpc_shim_rule: grpc_name = name[:-6] + \"_grpc_proto\" grpc_dep_set =", "on the currently selected platform architecture. Args: name: Base name", "[] for d in path.split(sep): if d in (\"\", \".\"):", "_loc(fileset_name), \"if [[ -e $${TEMP_DIR}/tarball/bin ]]\", \"then for f in", "= visibility, testonly = testonly, ) native.py_proto_library( name = py_name,", "decorate(name_arch, \"proto_rollup.flags\") protoc_srcs_set = (srcs + hdrs + protoc_deps +", "of the alias target. host: The result of the alias", "portable build system for Stratum P4 switch stack. To use", "\"-I.\"] # Generate messages gen_pb_h = gen_stem + \".pb.h\" gen_pb_cc", "argument. srcs: Analogous to cc_binary srcs argument. arches: List of", "# bat/baz -> bat/baz return label # Adds dquotes around", "note that ${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto. Args: name: Base name", "depset(deps or []) srcs = depset(srcs or []) if not", "= \" && \".join(cmds), heuristic_label_expansion = 0, visibility = visibility,", "\"ppc\", \"#elif defined(STRATUM_ARCH_%s)\" % \"X86\", include_fmt % \"x86\", \"#elif defined(STRATUM_ARCH_%s)\"", "\"-no-canonical-prefixes\", # Don't mangle paths and confuse blaze. 
\"-fno-builtin-malloc\", #", "= sc_platform_select( default = [\"STRATUM_ARCH_HOST\"], ppc = [\"STRATUM_ARCH_PPC\"], x86 =", "label_regex_for_dep = \"{extension_name}\", ) def sc_cc_lib( name, deps = None,", "be generated and addressed independently. This aspect of the system", "includes: Paths to add as \"-I\" compilation options. include_prefix: Analogous", "= pb_modifier + \".h\" for src in srcs: pkg, filename", "to use for x86 builds. default: The value to use", "compilation options. testonly: Standard blaze testonly parameter. visibility: Standard blaze", "%s\" % full_proto_include, \"else cd $(GENDIR)/%s\" % full_proto_include, \"fi\", ]))", "paths and confuse blaze. \"-fno-builtin-malloc\", # We'll use tcmalloc \"-fno-builtin-calloc\",", "- no decorations assumed, used and exported as header, not", "is provided for a given platform and no default is", "filegroup to accumulate the set of .proto files needed to", "% flag for flag in accum_flags] proto_rollup_cmds.append(\"cat $(SRCS)\") proto_rollup_cmd =", "embedded architectures # # Set _TRACE_SRCS to show sources in", "gen_stem + \".pb.h\" gen_pb_cc = gen_stem + \".pb.cc\" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc)", "\"//grte/v4_x86/release/usr/grte/v4\" # Portability definitions =================================================== def sc_cc_test( name, size =", "(\" && \".join(gen_cmds), new_hdr_loc)) shim_rule = decorate(name, \"shims\") native.genrule( name", "visibility = None, testonly = None, proto_include = None, python_support", "starts_with(s, prefix_list): for prefix in prefix_list: if s.startswith(prefix): return prefix", "this rule. 
services: List of services to enable {\"grpc\", \"rpc\"};", "\"..\": if level > 0: result.pop() level += -1 continue", "+ \".grpc.pb.cc\" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds = bash_vars + [ \"mkdir", "[], visibility = None, testonly = None, proto_include = None,", "if s.startswith(prefix): return prefix return None def sc_platform_select(host = None,", "+ hdrs: if src.endswith(\".proto\"): src_stem = src[0:-6] src_arch = \"%s_%s\"", "become ../../dir/to/other/path Args: path: A valid absolute or relative path", "%s\" % (temp_stem, _loc(gen_pb_cc)), ] pb_outs = [gen_pb_h, gen_pb_cc] native.genrule(", "None, srcs = None, arches = None, copts = None,", "+= 1 result.append(d) return sep.join(result) # Adds a suffix to", "argument. defines: Analogous to cc_test defines argument. copts: Analogous to", "for every listed architecture. The names are decorated to keep", "depset(deps.to_list()) cc_library( name = name, deps = sc_platform_filter(deps, [], arches),", "%s\" % temp_prefix, ] + proto_path_cmds + [ \" \".join([protoc]", "name = decorate(grpc_name, arch), deps = grpc_dep_set, srcs = grpc_gen_srcs,", "package. Args: name: Base name for this package. bins: List", "+ \"x86\": (x86 or default), }) # Generates an sc_platform_select", "rename not available. \"done\", \"fi\", \"tar czf %s -h -C", "proto might be in GENDIR! proto_path_cmds.append(\"; \".join([ \"if [[ -e", "testonly, proto_include, grpc_shim_rule): \"\"\"Creates rules and filegroups for embedded protobuf", "in deps], visibility = visibility, testonly = testonly, ) native.py_proto_library(", "needed for this package and all dependency packages. * ${name}_${arch}", "options. include_prefix: Analogous to cc_library include_prefix argument. 
strip_include_prefix: Analogous to", "sc_platform_filter(includes, [], arches), include_prefix = include_prefix, strip_include_prefix = strip_include_prefix, testonly", "-h -C $${TEMP_DIR}/tarball .\" % _loc(name + \".tar.gz\"), \"rm -rf", "if useful again then inject from cmdline else kill feature.", "testonly = None, textual_hdrs = None, visibility = None, xdeps", "def squote(s): return \"'\" + s + \"'\" # Emulate", "default), }) # Generates an sc_platform_select based on a textual", "to cc_binary deps argument. srcs: Analogous to cc_binary srcs argument.", "Standard blaze visibility parameter, passed through to subsequent rules. testonly:", "path for generated sc_cc_libs. grpc_shim_rule: If needed, the name of", "[protoc_label] grpc_tools = [protoc_label, grpc_plugin] protoc = \"$${g3}/%s\" % _loc(protoc_label)", "builds that .tar.gz package. Args: name: Base name for this", "to be packaged. data: List of files (and file producing", "generates: * ${name}_default_pb, a regular proto library. * ${name}_py, a", "Figure out how we can use $(CC_FLAGS) instead of this.", "protobuf compile rules - one for each arch. * sc_cc_lib(name)", "% (label, suffix) elif label.startswith(\"//\"): # //foo/bar -> //foo/bar:bar_suffix return", "target location for constructing commands. Args: target: Blaze target name", "[arch], copts = [], includes = includes, testonly = testonly,", "# which then calls sc_cc_lib with same name for each", "rules based on the platform architecture. Generates a blaze select(...)", "lib. 
\"\"\" bash_vars = [\"g3=$${PWD}\"] # TODO(unknown): Switch protobuf to", "\"grpc\": 0, } for service in services or []: if", "= visibility, ) dep_set = depset(deps) | [protobuf_label] includes =", "decorate(name, \"py\") proto_library( name = regular_proto_name, srcs = srcs, deps", "srcs = None, deps = None, data = None, defines", "\"host\" in arches else default, ppc = value if \"ppc\"", "= decorate(grpc_name, arch), deps = grpc_dep_set, srcs = grpc_gen_srcs, hdrs", "return \"%s_%s\" % (label, suffix) # Creates a relative filename", "host = value if \"host\" in arches else default, ppc", "\"{extension_name}\", ) def sc_cc_lib( name, deps = None, srcs =", "strip_include_prefix: Analogous to cc_library strip_include_prefix argument. data: Files to provide", "_PROTOBUF protobuf_hdrs = \"%s:well_known_types_srcs\" % protobuf_label protobuf_srcs = [protobuf_hdrs] protobuf_include", "with minimal use of path-up segments. Invalid input paths will", "# TODO(unknown): Add support for depending on normal proto_library rules.", "a portable Library. sc_proto_lib Declare a portable .proto Library. sc_cc_bin", "(x86 or default), }) # Generates an sc_platform_select based on", "srcdir = \"%s:BUILD\" % _PPC_GRTE, files = [\":libs\"], destdir =", "entries = [ native.FilesetEntry( files = inputs, ), ] +", "up into a usable library; note that ${src}_${arch}_erpc_proto depends on", "that interacts safely with Stratum builds. Generates a cc_test rule", "to this library. hdrs: As above, but also exported for", "The names are decorated to keep them different and allow", "useful again then inject from cmdline else kill feature. _TRACE_SRCS", "of the alias target. 
\"\"\" native.alias( name = name, actual", "this library arch: Which architecture to build this library for.", "deps = deps, visibility = visibility, testonly = testonly, )", "tcmalloc \"-fno-builtin-calloc\", \"-fno-builtin-realloc\", \"-fno-builtin-free\", \"-D__STDC_FORMAT_MACROS=1\", # TODO(unknown): Figure out how", "embedded builds this target will generate a dummy binary and", "deps: Analogous to cc_library deps argument. srcs: Analogous to cc_library", "all embedded architectures # # Set _TRACE_SRCS to show sources", "well as the include paths used for both sc_cc_lib and", "= visibility, ) for arch in arches: _gen_proto_lib( name =", "None, ppc = None, x86 = None, default = None,", "_TRACE_SRCS = False # Used for all gcc invocations. _EMBEDDED_FLAGS", "that will select the appropriate proto target based on the", "Only generated if python_support == True. Args: name: Base name", "= \"%s:BUILD\" % _PPC_GRTE, files = [\":libs\"], destdir = \"lib/stratum\",", "None, testonly = None, textual_hdrs = None, visibility = None,", "a string. def squote(s): return \"'\" + s + \"'\"", "this target will generate a dummy binary and will not", "# We use a different ppc toolchain for Stratum. #", "visibility, ) grpc_shim_rule = None if (service_enable[\"grpc\"]): grpc_shim_rule = _gen_proto_shims(", "ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES # Identify Stratum platform arch", "[]) srcs = depset(srcs or []) if not arches: arches", "accordingly. proto_src_loc = \"%s/%s\" % (native.package_name(), src) if proto_src_loc.startswith(full_proto_include +", "build multi-arch library from Message protobuf(s). 
For library \"name\", generates:", "arches else None, x86 = decorate(name, \"x86\") if \"x86\" in", "f in $${TEMP_DIR}/tarball/bin/*.stripped\", \" do mv $${f} $${f%.stripped}\", # rename", "includes = [proto_include] # Note: Public sc_proto_lib invokes this once", "else default, ) def sc_platform_alias( name, host = None, ppc", "[]) if srcs: if [s for s in srcs.to_list() if", "testonly): \"\"\"Creates a py_proto_library from the given srcs. There's no", "(listed) arch; # which then calls sc_cc_lib with same name", "parameter, passed through to all filesets. \"\"\" bins = depset(bins", "proto_include field in this rule. services: List of services to", "dep.endswith(\"_proto\"): protoc_deps.append(\"%s_%s_headers\" % (dep, arch)) name_arch = decorate(name, arch) #", "argument. srcs: Standard blaze srcs argument. deps: Standard blaze deps", "for ppc builds. x86: The result of the alias for", "+ \".pb\", srcs = protoc_srcs_set, outs = pb_outs, tools =", "# We'll be cd-ing to another directory before protoc, so", "visibility. The macros are like cc_library(), proto_library(), and cc_binary(), but", "portable shared libs for our ppc # executables. ppc =", "target. host: The result of the alias for host builds.", "in arches else default, ppc = value if \"ppc\" in", "blaze testonly parameter. proto_include: Include path for generated sc_cc_libs. grpc_shim_rule:", "inputs in [ (\"bin\", [\"%s.stripped\" % b for b in", "build ] # PPC ====================================================================== _PPC_GRTE = \"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\" # X86", "builds. ppc: The result of the alias for ppc builds.", "data needed for this package and all dependency packages. *", "\":\". 
def _make_filename(label): if label.startswith(\"//\"): # //foo/bar:bat/baz -> google3_foo/bar/bat/baz return", "[], hdrs = [], deps = [], arches = [],", "visibility = visibility, ) for arch in arches: _gen_proto_lib( name", "= _PROTOC protobuf_label = _PROTOBUF protobuf_hdrs = \"%s:well_known_types_srcs\" % protobuf_label", "else: # bat/baz -> bat/baz return label # Adds dquotes", "data = depset(data or []) textual_hdrs = depset(textual_hdrs or [])", "protobuf_srcs + [my_proto_rollup]) gen_srcs = [] gen_hdrs = [] grpc_gen_hdrs", "%s\" % temp_prefix, ] + proto_path_cmds + [ \" \".join([", "deps = None, arches = None, visibility = None): \"\"\"Public", "visibility: Standard blaze visibility parameter. xdeps: External (file) dependencies of", "of the alias for ppc builds. x86: The result of", "= [] for d in path.split(sep): if d in (\"\",", "for at least one platform in \" + \"sc_platform_select. Please", "\"#elif defined(STRATUM_ARCH_%s)\" % \"X86\", include_fmt % \"x86\", \"#elif defined(STRATUM_ARCH_%s)\" %", "For example ../../dir/to/deeply/nested/path/../../../other/path will become ../../dir/to/other/path Args: path: A valid", "More files to build into this library, but also exported", "# TODO(unknown): Switch protobuf to using the proto_include mechanism protoc_label", "testonly parameter. proto_include: Path to add to include path. This", "= \"//grpc:grpc_cpp_plugin\" def _loc(target): \"\"\"Return target location for constructing commands.", "no clean way to make python protos work with sc_proto_lib's", "srcs: List of .proto files - private to this library.", "are OK as long as the arches are disjoint. sc_cc_lib(", "None, xdeps = None): \"\"\"Creates rules for the given portable", "False, services = []): \"\"\"Public macro to build multi-arch library", "any dependencies. Args: name: Analogous to cc_test name argument. 
size:", "gen_cmds = [(\"printf '%%s\\\\n' '%s'\" % line) for line in", "copts = sc_platform_filter(copts, [], arches), defines = defs_plus, includes =", "External (file) dependencies of this library - no decorations assumed,", "(e.g.: .pb) srcs: List of proto files. arches: List of", "All embedded arches. EMBEDDED_PPC Name of PowerPC arch - \"ppc\".", "library arches: Which architectures to build this library for, None", "== \"..\": if level > 0: result.pop() level += -1", "parameter. textual_hdrs: Analogous to cc_library. visibility: Standard blaze visibility parameter.", "depset(srcs or []) hdrs = depset(hdrs or []) xdeps =", "with python support.\") _gen_py_proto_lib( name = name, srcs = depset(srcs", "instead. A specific value or default must be provided for", "Set _TRACE_SRCS to show sources in embedded sc_cc_lib compile steps.", "bat/baz return label[1:] else: # bat/baz -> bat/baz return label", "to create an alias that changes based on target arch.", "all gcc invocations. _EMBEDDED_FLAGS = [ \"-O0\", # Don't use", "${name}_py, a py_proto_library based on ${name}_default_pb. Args: name: Standard blaze", "name = fileset_name, out = name, entries = [ native.FilesetEntry(", "all dependency packages. * ${name}_${arch} fileset containing the corresponding bin", "hdrs: As above, but also exported for dependent rules to", "cc_library(), proto_library(), and cc_binary(), but with different options and some", "[\"-ldl\", \"-lutil\"], testonly = testonly, visibility = visibility, ) register_extension_info(", "Analogous to cc_test deps argument. data: Analogous to cc_test data", "_PROTOC protobuf_label = _PROTOBUF protobuf_hdrs = \"%s:well_known_types_srcs\" % protobuf_label protobuf_srcs", "+ [my_proto_rollup]) gen_srcs = [] gen_hdrs = [] grpc_gen_hdrs =", "python support may only depend on sc_proto_libs that also have", "(HOST_ARCHES not generally supported). visibility: Standard blaze visibility parameter, passed", "is used instead. 
A specific value or default must be", "out how we can use $(CC_FLAGS) instead of this. \"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\",", "% \"; \".join(proto_rollup_cmds) native.genrule( name = decorate(name_arch, \"proto_rollup\"), srcs =", "extension), entries = [ native.FilesetEntry( files = inputs, ), ]", "${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule to run protoc w/ erpc plugin: ${src}.proto", "the symbols needed. The public symbols are the macros: decorate(path)", "= sc_platform_filter( textual_plus | xdeps, [], arches, ), data =", "or []) srcs = depset(srcs or []) if not arches:", "# We do a bit of extra work with these", "-C $${TEMP_DIR}/tarball .\" % _loc(name + \".tar.gz\"), \"rm -rf $${TEMP_DIR}\",", "arches: List of architectures to generate this way. copts: Analogous", "alwayslink = 1 if not arches: arches = ALL_ARCHES defs_plus", "\"//stratum:stratum_\" return select({ \"//conditions:default\": (host or default), config_label_prefix + \"ppc\":", "# attributes, so all embedded dependencies appear as a `src'.", "arches), alwayslink = alwayslink, copts = sc_platform_filter(copts, [], arches), defines", "for prefix in prefix_list: if s.startswith(prefix): return prefix return None", "= [], hdrs = [], deps = [], arches =", "# Copyright 2018 Google LLC # Copyright 2018-present Open Networking", "protobuf_hdrs = \"%s:well_known_types_srcs\" % protobuf_label protobuf_srcs = [protobuf_hdrs] protobuf_include =", "in most contexts to alter a blaze rule based on", "copts = None, defines = None, includes = None, testonly", "squotes, so quote accordingly. include_fmt = \"#include \" + dquote(pkg", "regular proto library. * ${name}_py, a py_proto_library based on ${name}_default_pb.", "is set, a dummy default target is used instead. Args:", "to normalize. Returns: A path equivalent to the input path", "will become ../../dir/to/other/path Args: path: A valid absolute or relative", "segments. Invalid input paths will stay invalid. 
\"\"\" sep =", "based on ${name}_default_pb. Args: name: Standard blaze name argument. srcs:", "argument. hdrs: Analogous to cc_library hdrs argument. arches: List of", "ppc = ppc, x86 = x86, ), visibility = visibility,", "library; note that ${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto. Args: name: Base", "include_prefix argument. strip_include_prefix: Analogous to cc_library strip_include_prefix argument. data: Files", "cc_binary deps argument. srcs: Analogous to cc_binary srcs argument. arches:", "dependencies. Args: name: Analogous to cc_test name argument. size: Analogous", "of {host,ppc,x86} that isn't specified. Returns: The requested selector. \"\"\"", "+= -1 continue else: level += 1 result.append(d) return sep.join(result)", "_SC_GRPC_PLUGIN = \"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\" _GRPC_PLUGIN = \"//grpc:grpc_cpp_plugin\" def _loc(target): \"\"\"Return target", "appropriate proto target based on the currently selected platform architecture.", "None, visibility = None): \"\"\"Creates rules for the given portable", "= depset(srcs or []) hdrs = depset(hdrs or []) xdeps", "(temp_stem, _loc(gen_grpc_pb_cc)), ] grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule( name =", "[ \"#if defined(STRATUM_ARCH_%s)\" % \"PPC\", include_fmt % \"ppc\", \"#elif defined(STRATUM_ARCH_%s)\"", "for service in services or []: if service == \"grpc\":", "return \"'\" + s + \"'\" # Emulate Python 2.5+", "stack. To use this, load() this file in a BUILD", "this. \"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\", ] # Used for C and C++ compiler", "fail(\"service='%s' not in (grpc, rpc)\" % service) deps = depset(deps", "\".pb\", srcs = srcs + hdrs, arches = arches, visibility", "stay invalid. \"\"\" sep = \"/\" level = 0 result", "grpc_plugin = _GRPC_PLUGIN protoc_deps = [] for dep in deps:", "\"-I\" compilation options. testonly: Standard blaze testonly parameter. 
visibility: Standard", "[\"%s.tar.gz\" % name] # Copy our files into a temporary", "implementation because import os is not allowed in build defs.", "\"mkdir $${TEMP_DIR}\", \"cp -r %s $${TEMP_DIR}/tarball\" % _loc(fileset_name), \"if [[", "arch. HOST_ARCHES All host arches. STRATUM_INTERNAL For declaring Stratum internal", "that also have python support, and may not use the", "Analogous to cc_library strip_include_prefix argument. data: Files to provide as", "\"$${g3}/$(GENDIR)/%s/%s\" % (native.package_name(), arch) accum_flags = [] full_proto_include = None", "None, default = None): \"\"\"Public macro to alter blaze rules", "outs = grpc_pb_outs, tools = grpc_tools, cmd = \" &&", "= sc_platform_select(host = srcs or [], default = []), deps", "Args: path: A valid absolute or relative path to normalize.", "arches. Args: name: Analogous to cc_library name argument. deps: Analogous", "in lines] new_hdr_loc = \"$(location %s)\" % new_hdr_name cmds.append(\"{ %s;", "actual = sc_platform_select( default = default or \"//stratum/portage:dummy\", host =", "Note: Public sc_proto_lib invokes this once per (listed) arch; #", "native.Fileset( name = decorate(fileset_name, extension), out = decorate(name, extension), entries", "None and (host == None or ppc == None or", "] native.genrule( name = decorate(name, \"tarball\"), srcs = [\":%s\" %", ") def _gen_proto_shims(name, pb_modifier, srcs, arches, visibility): \"\"\"Macro to build", "Declare a portable Library. sc_proto_lib Declare a portable .proto Library.", "generated for every listed architecture. The names are decorated to", "[protoc_label, grpc_plugin] protoc = \"$${g3}/%s\" % _loc(protoc_label) grpc_plugin = \"$${g3}/%s\"", "rules that depend on this rule. Typically \".\" python_support: Defaults", "...]) def starts_with(s, prefix_list): for prefix in prefix_list: if s.startswith(prefix):", "generally supported). visibility: Standard blaze visibility parameter, passed through to", "name argument. 
srcs: Standard blaze srcs argument. deps: Standard blaze", "include '%s' doesn't match src %s\" % (full_proto_include, proto_src_loc)) #", "rules and filegroups for embedded protobuf library. For every given", "on an sc_proto_lib with python support.\") _gen_py_proto_lib( name = name,", "alias for host builds. ppc: The result of the alias", "%s)\" % new_hdr_name cmds.append(\"{ %s; } > %s\" % (\"", "files into a temporary directory and make any necessary changes", "if result: continue elif d == \"..\": if level >", "include flags to avoid generating # warnings. accum_flags.append( \"$$(if [[", "(full_proto_include, full_proto_include), ) accum_flags.append( \"$$(if [[ -e %s ]]; then", "path.pb.h consisting of: #ifdef logic to select path.${arch}.pb.h Also generates", "blaze testonly argument. \"\"\" regular_proto_name = decorate(name, \"default_pb\") py_name =", "sources in embedded sc_cc_lib compile steps. # This is more", "ppc = None, x86 = None, default = None, visibility", "(grpc, rpc)\" % service) deps = depset(deps or []) shim_rule", "normalize. Returns: A path equivalent to the input path with", "will generate a dummy binary and will not attempt to", "heuristic_label_expansion = 0, visibility = visibility, ) # Generate GRPC", "None or x86 == None): fail(\"Missing a select value for", "= regular_proto_name, srcs = srcs, deps = [decorate(dep, \"default_pb\") for", "(\"bin\", [\"%s.stripped\" % b for b in bins.to_list()]), (\"data\", data),", "def _gen_proto_shims(name, pb_modifier, srcs, arches, visibility): \"\"\"Macro to build .pb.h", "]), \"cd $${g3}\", \"cp %s.pb.h %s\" % (temp_stem, _loc(gen_pb_h)), \"cp", "add as \"-I\" compilation options. include_prefix: Analogous to cc_library include_prefix", "tarball package. and the variables/lists: ALL_ARCHES All known arches. EMBEDDED_ARCHES", "the alias for host builds. 
ppc: The result of the", "return sep.join(result) # Adds a suffix to a label, expanding", "elif label.startswith(\":\"): # :bat/baz -> bat/baz return label[1:] else: #", "target. If no selection is provided for a given platform", "path.proto, generates path.pb.h consisting of: #ifdef logic to select path.${arch}.pb.h", "2.5+ str(startswith([prefix ...]) def starts_with(s, prefix_list): for prefix in prefix_list:", "srcs + hdrs, arches = arches, visibility = visibility, )", "so quote accordingly. include_fmt = \"#include \" + dquote(pkg +", "include paths used for both sc_cc_lib and sc_proto_lib rules that", "Foundation # SPDX-License-Identifier: Apache-2.0 \"\"\"A portable build system for Stratum", "cc_test rule that doesn't break the build when an embedded", "_normpath(path): \"\"\"Normalize a path. Normalizes a path by removing unnecessary", "sc_platform_filter( deps, [\"//stratum/portage:dummy_with_main\"], arches, ), srcs = sc_platform_filter(srcs, [], arches),", "architecture. The names are decorated to keep them different and", "given portable binary and arches. Args: name: Analogous to cc_binary", ") else: temp_prefix = \"%s/%s\" % (cpp_out, native.package_name()) proto_rollups =", "= [my_proto_rollup], cmd = proto_rollup_cmd, visibility = visibility, testonly =", "arch), deps = grpc_dep_set, srcs = grpc_gen_srcs, hdrs = hdrs", "\"true\", ) sc_platform_alias( name = name, host = decorate(name, \"host\")", "'%s' doesn't match src %s\" % (full_proto_include, proto_src_loc)) # By", "else None, visibility = visibility, ) return shim_rule def _gen_py_proto_lib(name,", "None): \"\"\"Public macro to alter blaze rules based on the", "package. and the variables/lists: ALL_ARCHES All known arches. EMBEDDED_ARCHES All", "= [\"g3=$${PWD}\"] # TODO(unknown): Switch protobuf to using the proto_include", "from the given srcs. There's no clean way to make", "None): \"\"\"Creates rules for the given portable library and arches.", "producing rules) to be packaged. 
deps: List of other sc_packages", "+ protobuf_srcs + [my_proto_rollup]) gen_srcs = [] gen_hdrs = []", "\"tarball\"), srcs = [\":%s\" % fileset_name], outs = outs, cmd", "(cpp_out, native.package_name()) proto_rollups = [ decorate(decorate(dep, arch), \"proto_rollup.flags\") for dep", "As above, but also exported for dependent rules to utilize.", "cc_test name argument. size: Analogous to cc_test size argument. srcs:", "in label: # .../bar:bat -> .../bar:bat_suffix return \"%s_%s\" % (label,", "= \"%s/%s\" % (native.package_name(), src) if proto_src_loc.startswith(full_proto_include + \"/\"): proto_src_loc", "def sc_platform_select(host = None, ppc = None, x86 = None,", "== None and (host == None or ppc == None", "not s.endswith(\".h\")]: alwayslink = 1 if not arches: arches =", "= sc_platform_filter(hdrs, [], arches), alwayslink = alwayslink, copts = sc_platform_filter(copts,", "for extension, inputs in [ (\"bin\", [\"%s.stripped\" % b for", "+ 1:] else: print(\"Invalid proto include '%s' doesn't match src", "\"sc_cc_lib\", label_regex_for_dep = \"{extension_name}\", ) def sc_cc_bin( name, deps =", "alias that will select the appropriate proto target based on", "= protoc_srcs_set, outs = pb_outs, tools = tools, cmd =", "depset(data or []) deps = depset(deps or []) if not", "= _gen_proto_shims( name = name, pb_modifier = \".pb\", srcs =", "as long as the arches are disjoint. sc_cc_lib( name =", "portable Library. sc_proto_lib Declare a portable .proto Library. sc_cc_bin Declare", "to cc_test name argument. size: Analogous to cc_test size argument.", "architecture. If no selection is provided for a given platform,", "all filesets. \"\"\" bins = depset(bins or []) data =", "extension), out = decorate(name, extension), entries = [ native.FilesetEntry( files", "= None, visibility = None): \"\"\"Public macro to create an", "hdrs, arches = arches, visibility = visibility, ) grpc_shim_rule =", "EMBEDDED_ARCHES (HOST_ARCHES not generally supported). 
visibility: Standard blaze visibility parameter,", "Build options for all embedded architectures # # Set _TRACE_SRCS", "arch in arches: _gen_proto_lib( name = name, srcs = srcs,", "[my_proto_rollup]) gen_srcs = [] gen_hdrs = [] grpc_gen_hdrs = []", "generate: :${src}_${arch}.pb rule to run protoc ${src}.proto => ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb", "= textual_hdrs | depset(deps.to_list()) cc_library( name = name, deps =", "srcs or [], default = []), deps = sc_platform_select( host", "proto_src_loc[len(full_proto_include) + 1:] else: print(\"Invalid proto include '%s' doesn't match", "grpc_gen_srcs.append(gen_grpc_pb_cc) cmds = bash_vars + [ \"mkdir -p %s\" %", "Name of \"x86\" arch. HOST_ARCH Name of default \"host\" arch.", "\"sc_cc_test\", label_regex_for_dep = \"{extension_name}\", ) def sc_cc_lib( name, deps =", "blaze rule based on the target platform architecture. If no", "the given portable library and arches. Args: name: Analogous to", "= 1 if not arches: arches = ALL_ARCHES defs_plus =", "visibility = visibility, ) register_extension_info( extension_name = \"sc_cc_lib\", label_regex_for_dep =", "-o $(@)\" % \"; \".join(proto_rollup_cmds) native.genrule( name = decorate(name_arch, \"proto_rollup\"),", "Portability definitions =================================================== def sc_cc_test( name, size = None, srcs", "proto_path_cmds.append(\"[[ -e %s ]] || cd $(GENDIR)\" % proto_src_loc) gendir_include", "= gen_stem + \".grpc.pb.cc\" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds = bash_vars +", "deps = [regular_proto_name], visibility = visibility, testonly = testonly, )", "mechanism protoc_label = _PROTOC protobuf_label = _PROTOBUF protobuf_hdrs = \"%s:well_known_types_srcs\"", "copts: Analogous to cc_library copts argument. 
defines: Symbols added as", "depend on sc_proto_libs that also have python support, and may", "suffix) # Creates a relative filename from a label, replacing", "\"then cd %s\" % full_proto_include, \"else cd $(GENDIR)/%s\" % full_proto_include,", "py_name = decorate(name, \"py\") proto_library( name = regular_proto_name, srcs =", "sc_proto_lib with python support.\") _gen_py_proto_lib( name = name, srcs =", "for, None => ALL. visibility: Standard blaze visibility parameter, passed", "None, data = None, deps = None, arches = None,", "for f in $${TEMP_DIR}/tarball/bin/*.stripped\", \" do mv $${f} $${f%.stripped}\", #", "\"\"\"Creates a cc_test rule that interacts safely with Stratum builds.", "to cc_test defines argument. copts: Analogous to cc_test copts argument.", "for dep in deps if dep.endswith(\"_proto\") ] proto_rollup_cmds = [\"printf", "(file) dependencies of this library - no decorations assumed, used", "if full_proto_include: temp_prefix = \"%s/%s\" % (cpp_out, native.package_name()[len(full_proto_include):]) # We", "defined(STRATUM_ARCH_%s)\" % \"X86\", include_fmt % \"x86\", \"#elif defined(STRATUM_ARCH_%s)\" % \"HOST\",", "decorate(name, \"ppc\") if \"ppc\" in arches else None, x86 =", "[\"rollup=$$(sed \\\"s,G3LOC,$${PWD},g\\\" %s)\" % _loc(my_proto_rollup)] proto_rollup_flags = [\"$${rollup}\"] if proto_include:", "[]): \"\"\"Public macro to build multi-arch library from Message protobuf(s).", "# warnings. accum_flags.append( \"$$(if [[ -e $(GENDIR)/%s ]]; then echo", "other sc_packages to add to this package. arches: Which architectures", "name for each arch; # multiple such calls are OK", "\"shims\") native.genrule( name = shim_rule, srcs = srcs, outs =", "= name, srcs = srcs, hdrs = [shim_rule] + hdrs,", "if needed. def decorate(label, suffix): if label.endswith(\":\"): # .../bar: ->", "of default \"host\" arch. HOST_ARCHES All host arches. 
STRATUM_INTERNAL For", "= \"sc_cc_test\", label_regex_for_dep = \"{extension_name}\", ) def sc_cc_lib( name, deps", "native.package_name() elif proto_include: full_proto_include = \"%s/%s\" % (native.package_name(), proto_include) if", "generates path.pb.h consisting of: #ifdef logic to select path.${arch}.pb.h Also", "\"sc_cc_bin\", label_regex_for_dep = \"{extension_name}\", ) # Protobuf ================================================================= _SC_GRPC_DEPS =", "in srcs.to_list() if not s.endswith(\".h\")]: alwayslink = 1 if not", "Args: target: Blaze target name available to this build. Returns:", "options. includes: Paths to add as \"-I\" compilation options. testonly:", "compile this proto. native.filegroup( name = decorate(name_arch, \"headers\"), srcs =", "src[0:-6] src_arch = \"%s_%s\" % (src_stem, arch) temp_stem = \"%s/%s\"", "\"\"\" bins = depset(bins or []) data = depset(data or", "depset(deps or []) if not arches: arches = EMBEDDED_ARCHES fileset_name", "name of the grpc shim for this proto lib. \"\"\"", "visibility, ) return shim_rule def _gen_py_proto_lib(name, srcs, deps, visibility, testonly):", "= depset(bins or []) data = depset(data or []) deps", "name: Standard blaze name argument. srcs: Standard blaze srcs argument.", "] # Used for C and C++ compiler invocations. _EMBEDDED_CFLAGS", "passed through to subsequent rules. testonly: Standard blaze testonly parameter.", "hdrs: More files to build into this library, but also", "name = None, srcs = [], hdrs = [], deps", "and allow all to be generated and addressed independently. This", "Google LLC # Copyright 2018-present Open Networking Foundation # SPDX-License-Identifier:", "protobuf library. For every given ${src}.proto, generate: :${src}_${arch}.pb rule to", "input. * ${name}_py a py_proto_library version of this library. 
Only", "grpc_name = name[:-6] + \"_grpc_proto\" grpc_dep_set = dep_set | [name]", "\"rpc\": service_enable[\"grpc\"] = 1 else: fail(\"service='%s' not in (grpc, rpc)\"", "../../dir/to/other/path Args: path: A valid absolute or relative path to", "] + platform_entries, visibility = visibility, ) outs = [\"%s.tar.gz\"", "to another directory before protoc, so # adjust our .proto", "sc_cc_test( name, size = None, srcs = None, deps =", "= gen_stem + \".pb.h\" gen_pb_cc = gen_stem + \".pb.cc\" gen_hdrs.append(gen_pb_h)", "target based on the currently selected platform architecture. Args: name:", "the currently selected platform architecture. Args: name: Base name for", "and cc_binary(), but with different options and some restrictions. The", "a blaze rule based on the target platform architecture. If", "= testonly, ) register_extension_info( extension_name = \"sc_proto_lib\", label_regex_for_dep = \"{extension_name}\",", "my_proto_rollup = decorate(name_arch, \"proto_rollup.flags\") protoc_srcs_set = (srcs + hdrs +", "= name, srcs = depset(srcs + hdrs), deps = deps,", "= sc_platform_filter(deps, [], arches), srcs = sc_platform_filter(srcs, [], arches), hdrs", "hdr_ext outs.append(new_hdr_name) # Generate lines for shim switch file. #", "before protoc, so # adjust our .proto path accordingly. proto_src_loc", "name, deps = sc_platform_filter(deps, [], arches), srcs = sc_platform_filter(srcs, [],", "srcs = gen_srcs, hdrs = hdrs + gen_hdrs, arches =", "label helpers. ============================================ def _normpath(path): \"\"\"Normalize a path. Normalizes a", "_loc(gen_pb_cc)), ] pb_outs = [gen_pb_h, gen_pb_cc] native.genrule( name = src_arch", "$${TEMP_DIR}\", ] native.genrule( name = decorate(name, \"tarball\"), srcs = [\":%s\"", "or []) xdeps = depset(xdeps or []) copts = depset(copts", "else kill feature. _TRACE_SRCS = False # Used for all", "generate this way. copts: Analogous to cc_library copts argument. 
defines:", "% grpc_plugin, ] + gendir_include + proto_rollup_flags + [ \"-I%s\"", "files = inputs, ), ] + [ native.FilesetEntry(srcdir = decorate(dep,", "= copts, linkopts = linkopts, visibility = visibility, ) register_extension_info(", "ppc toolchain for Stratum. # This means that we must", "with python support may only depend on sc_proto_libs that also", "Analogous to cc_library copts argument. defines: Symbols added as \"-D\"", "&& \".join(cmds), heuristic_label_expansion = 0, visibility = visibility, ) #", "an embedded arch is selected. During embedded builds this target", "-IG3LOC/$(GENDIR)/%s; fi)\" % (full_proto_include, full_proto_include), ) accum_flags.append( \"$$(if [[ -e", "deps = [decorate(dep, \"default_pb\") for dep in deps], visibility =", "for any of {host,ppc,x86} that isn't specified. Returns: The requested", "blaze visibility parameter. xdeps: External (file) dependencies of this library", "A path equivalent to the input path with minimal use", "suboptimal - something along the lines of augmenting context with", "arches. STRATUM_INTERNAL For declaring Stratum internal visibility. The macros are", "a label, replacing \"//\" and \":\". def _make_filename(label): if label.startswith(\"//\"):", "arches, ), data = sc_platform_filter(data, [], arches), visibility = visibility,", "# # Set _TRACE_SRCS to show sources in embedded sc_cc_lib", "= None if (service_enable[\"grpc\"]): grpc_shim_rule = _gen_proto_shims( name = decorate(name[:-6],", "to use for ppc builds. x86: The value to use", "way. copts: Analogous to cc_binary copts argument. defines: Symbols added", "(temp_stem, _loc(gen_pb_h)), \"cp %s.pb.cc %s\" % (temp_stem, _loc(gen_pb_cc)), ] pb_outs", "native.Fileset( name = fileset_name, out = name, entries = [", "%s ]] || cd $(GENDIR)\" % proto_src_loc) gendir_include = [\"-I$(GENDIR)\",", "# Generates an sc_platform_select based on a textual list of", "the grpc shim for this proto lib. 
\"\"\" bash_vars =", "$(GENDIR)/%s\" % full_proto_include, \"fi\", ])) gendir_include = [\"-I$${g3}/$(GENDIR)\", \"-I$${g3}\", \"-I.\"]", "native.genrule( name = src_arch + \".pb\", srcs = protoc_srcs_set, outs", "a py_proto_library version of this library. Only generated if python_support", "Adds squotes around a string. def squote(s): return \"'\" +", "Which architecture to build this library for. visibility: Standard blaze", "context with a user defined configuration fragment would be a", "Copyright 2018 Google LLC # Copyright 2018-present Open Networking Foundation", "select the appropriate target. If no selection is provided for", "sc_cc_bin Declare a portable Binary. sc_package Declare a portable tarball", "= [ \"//stratum:__subpackages__\", ] # # Build options for all", "outs = [] cmds = [] hdr_ext = pb_modifier +", "EMBEDDED_X86, ] HOST_ARCH = \"host\" HOST_ARCHES = [HOST_ARCH] ALL_ARCHES =", "above. * ${name}_${arch}_pb protobuf compile rules - one for each", "\"\"\"Creates rules for the given portable binary and arches. Args:", "[\"printf '%%s\\n' %s\" % flag for flag in accum_flags] proto_rollup_cmds.append(\"cat", "= [ # \"-static\", # Use this for program-sizing build", "% _loc(name + \".tar.gz\"), \"rm -rf $${TEMP_DIR}\", ] native.genrule( name", "for ppc builds. x86: The value to use for x86", "arch. * sc_cc_lib(name) with those as input. * ${name}_py a", "arches = arches, visibility = visibility, ) grpc_shim_rule = None", "\"-Werror\", # Warn lots, and force fixing warnings. 
\"-no-canonical-prefixes\", #", "do a bit of extra work with these include flags", "target) \"\"\" return \"$(location %s)\" % target def _gen_proto_lib( name,", ") if python_support: if proto_include: fail(\"Cannot use proto_include on an", "[\":libs\"], destdir = \"lib/stratum\", symlinks = \"dereference\", )], default =", "Also generates an alias that will select the appropriate proto", "default: The result of the alias for any of {host,ppc,x86}", "visibility of the alias target. \"\"\" native.alias( name = name,", "0, visibility = visibility, ) # Generate GRPC if grpc_shim_rule:", "= strip_include_prefix, testonly = testonly, textual_hdrs = sc_platform_filter( textual_plus |", "arches this shim should support. visibility: The blaze visibility of", "invocations. _EMBEDDED_CXXFLAGS = [ \"-std=gnu++11\", # Allow C++11 features _and_", "independently. This aspect of the system is suboptimal - something", "arch. HOST_ARCH Name of default \"host\" arch. HOST_ARCHES All host", "is not allowed in build defs. For example ../../dir/to/deeply/nested/path/../../../other/path will", "# TODO(unknown): if useful again then inject from cmdline else", "textual_hdrs = depset(textual_hdrs or []) if srcs: if [s for", "a label, expanding implicit targets if needed. def decorate(label, suffix):", "+ protoc_deps, visibility = visibility, ) my_proto_rollup = decorate(name_arch, \"proto_rollup.flags\")", "\"--grpc-cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp %s.grpc.pb.h %s\"", "other portability hacks. _ARCH_DEFINES = sc_platform_select( default = [\"STRATUM_ARCH_HOST\"], ppc", "[ \" \".join([ protoc, \"--plugin=protoc-gen-grpc-cpp=%s\" % grpc_plugin, ] + gendir_include", "testonly parameter. proto_include: Include path for generated sc_cc_libs. grpc_shim_rule: If", "for every target platform. Args: host: The value to use", "name for this library. srcs: List of .proto files -", "library from this rule. 
Any sc_proto_lib with python support may", "mapped to bin/ and share/ respectively. * ${name}_${arch}_tarball rule builds", "tools, cmd = \" && \".join(cmds), heuristic_label_expansion = 0, visibility", "depending on normal proto_library rules. def sc_proto_lib( name = None,", "Analogous to cc_test size argument. srcs: Analogous to cc_test srcs", "to provide as data at runtime (host builds only). testonly:", "hdrs, arches = arches, visibility = visibility, ) for arch", "addressed independently. This aspect of the system is suboptimal -", "switch stack. To use this, load() this file in a", "arches: List of arches this shim should support. visibility: The", "\"if [[ -e $${TEMP_DIR}/tarball/bin ]]\", \"then for f in $${TEMP_DIR}/tarball/bin/*.stripped\",", "arch, visibility, testonly, proto_include, grpc_shim_rule): \"\"\"Creates rules and filegroups for", "library. srcs: List of .proto files - private to this", "# We'll use tcmalloc \"-fno-builtin-calloc\", \"-fno-builtin-realloc\", \"-fno-builtin-free\", \"-D__STDC_FORMAT_MACROS=1\", # TODO(unknown):", "\"tar czf %s -h -C $${TEMP_DIR}/tarball .\" % _loc(name +", "build definitions. ============================================== EMBEDDED_PPC = \"ppc\" EMBEDDED_X86 = \"x86\" EMBEDDED_ARCHES", "one for each arch. * sc_cc_lib(name) with those as input.", "warnings. accum_flags.append( \"$$(if [[ -e $(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s;", "_loc(target): \"\"\"Return target location for constructing commands. Args: target: Blaze", "be in GENDIR! proto_path_cmds.append(\"; \".join([ \"if [[ -e %s ]]\"", "show sources in embedded sc_cc_lib compile steps. # This is", "hdrs = depset(hdrs or []) xdeps = depset(xdeps or [])", "= decorate(dep, extension)) for dep in deps.to_list() ], visibility =", "= testonly, textual_hdrs = sc_platform_filter( textual_plus | xdeps, [], arches,", "generated sc_cc_libs. grpc_shim_rule: If needed, the name of the grpc", "$${f} $${f%.stripped}\", # rename not available. 
\"done\", \"fi\", \"tar czf", "\"cp -r %s $${TEMP_DIR}/tarball\" % _loc(fileset_name), \"if [[ -e $${TEMP_DIR}/tarball/bin", "pb_modifier: protoc plugin-dependent file extension (e.g.: .pb) srcs: List of", "_SC_GRPC_DEPS grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs sc_cc_lib( name = decorate(grpc_name,", "[ native.FilesetEntry(srcdir = decorate(dep, extension)) for dep in deps.to_list() ],", "\"/\") elif label.startswith(\":\"): # :bat/baz -> bat/baz return label[1:] else:", "a much cleaner solution. Currently supported architectures: ppc x86 \"\"\"", "label = label[:-1] if \":\" in label: # .../bar:bat ->", ".pb) srcs: List of proto files. arches: List of arches", "any platform specific files to the final tarball. platform_entries =", "data = None, testonly = None, textual_hdrs = None, visibility", "native.alias( name = name, actual = sc_platform_select( default = default", "paths will stay invalid. \"\"\" sep = \"/\" level =", "srcs = [], hdrs = [], deps = [], arches", "or default must be provided for every target platform. Args:", "the alias target. \"\"\" native.alias( name = name, actual =", "testonly: Standard blaze testonly parameter. textual_hdrs: Analogous to cc_library. visibility:", "name[:-6] + \"_grpc_proto\" grpc_dep_set = dep_set | [name] | _SC_GRPC_DEPS", "\"-Wl,--gc-sections,--no-wchar-size-warning\", # Use this for program-sizing build ] # PPC", "lots, and force fixing warnings. \"-no-canonical-prefixes\", # Don't mangle paths", "1 result.append(d) return sep.join(result) # Adds a suffix to a", "or []) + _ARCH_DEFINES cc_binary( name = name, deps =", "pb_modifier + \".h\" for src in srcs: pkg, filename =", "architecture to build this library for. visibility: Standard blaze visibility", "files - private to this library. hdrs: As above, but", "this library for. 
visibility: Standard blaze visibility parameter, passed through", "= None, includes = None, testonly = None, visibility =", "use tcmalloc \"-fno-builtin-calloc\", \"-fno-builtin-realloc\", \"-fno-builtin-free\", \"-D__STDC_FORMAT_MACROS=1\", # TODO(unknown): Figure out", "[], includes = includes, testonly = testonly, textual_hdrs = gen_hdrs,", "The public symbols are the macros: decorate(path) sc_cc_lib Declare a", "arches, ), srcs = sc_platform_filter(srcs, [], arches), copts = copts,", "shim for this proto lib. \"\"\" bash_vars = [\"g3=$${PWD}\"] #", "\" \".join([ protoc, \"--plugin=protoc-gen-grpc-cpp=%s\" % grpc_plugin, ] + gendir_include +", "to the input path with minimal use of path-up segments.", "protoc, \"--plugin=protoc-gen-grpc-cpp=%s\" % grpc_plugin, ] + gendir_include + proto_rollup_flags +", "compile rules - one for each arch. * sc_cc_lib(name) with", "that we must provide portable shared libs for our ppc", "deps = dep_set, srcs = gen_srcs, hdrs = hdrs +", "files. arches: List of arches this shim should support. visibility:", "\"rpc\" and \"grpc\" are equivalent. \"\"\" if not arches: if", "shims and other portability hacks. 
_ARCH_DEFINES = sc_platform_select( default =", "_EMBEDDED_LDFLAGS = [ # \"-static\", # Use this for program-sizing", "= \"%s/%s\" % (native.package_name(), src) proto_path_cmds.append(\"[[ -e %s ]] ||", "service in services or []: if service == \"grpc\": service_enable[\"grpc\"]", "sep = \"/\" level = 0 result = [] for", "example ../../dir/to/deeply/nested/path/../../../other/path will become ../../dir/to/other/path Args: path: A valid absolute", "= \"sc_proto_lib\", label_regex_for_dep = \"{extension_name}\", ) def sc_package( name =", "\"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\" # X86 ====================================================================== _X86_GRTE = \"//grte/v4_x86/release/usr/grte/v4\" # Portability definitions", "protobuf_include = \"$${g3}/protobuf/src\" if arch in EMBEDDED_ARCHES: grpc_plugin = _SC_GRPC_PLUGIN", "and filegroups for embedded protobuf library. For every given ${src}.proto,", "= \"%s/%s\" % (temp_prefix, src_stem) gen_stem = \"%s.%s\" % (src_stem,", ") register_extension_info( extension_name = \"sc_cc_bin\", label_regex_for_dep = \"{extension_name}\", ) #", "to cc_test visibility argument. \"\"\" cc_test( name = name, size", "proto_src_loc.startswith(full_proto_include + \"/\"): proto_src_loc = proto_src_loc[len(full_proto_include) + 1:] else: print(\"Invalid", "force protoc to produce # different symbols. Careful, our proto", "None, ppc = decorate(name, \"ppc\") if \"ppc\" in arches else", "string. def squote(s): return \"'\" + s + \"'\" #", "For each src path.proto, generates path.pb.h consisting of: #ifdef logic", "use $${PWD} until this step, because our rollup command #", "+ platform_entries, visibility = visibility, ) outs = [\"%s.tar.gz\" %", "platform architecture. If no selection is provided for a given", "$(CC_FLAGS) instead of this. \"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\", ] # Used for C", "target is used instead. 
Args: name: The name of the", "of proto files hdrs: More files to build into this", "= None, include_prefix = None, strip_include_prefix = None, data =", "= grpc_gen_hdrs + gen_hdrs sc_cc_lib( name = decorate(grpc_name, arch), deps", "binaries are generated for every listed architecture. The names are", "python proto library from this rule. Any sc_proto_lib with python", "= srcs, hdrs = [shim_rule] + hdrs, deps = deps,", "generated and addressed independently. This aspect of the system is", "for x86 builds. default: The result of the alias for", "protoc plugin-dependent file extension (e.g.: .pb) srcs: List of proto", "LLC # Copyright 2018-present Open Networking Foundation # SPDX-License-Identifier: Apache-2.0", "create an alias that changes based on target arch. Generates", "paths used for both sc_cc_lib and sc_proto_lib rules that depend", "this library - no decorations assumed, used and exported as", "Args: name: Standard blaze name argument. srcs: Standard blaze srcs", "#ifdef logic to select path.${arch}.pb.h Also generates an alias that", "the lines of augmenting context with a user defined configuration", "srcdir = decorate(name, \"bin\"), destdir = \"bin\", ), native.FilesetEntry( srcdir", "fileset_name], outs = outs, cmd = \"; \".join(cmds), visibility =", "src in srcs: pkg, filename = parse_label(src) if not filename.endswith(\".proto\"):", "srcs, hdrs, deps, arch, visibility, testonly, proto_include, grpc_shim_rule): \"\"\"Creates rules", "sc_platform_select(host = srcs or [], default = []), deps =", "proto_path_cmds + [ \" \".join([protoc] + gendir_include + proto_rollup_flags +", "arches = None, visibility = None): \"\"\"Public macro to package", "library. hdrs: As above, but also exported for dependent rules", "arch) temp_stem = \"%s/%s\" % (temp_prefix, src_stem) gen_stem = \"%s.%s\"", "-> bat/baz return label # Adds dquotes around a string.", "architectures to generate this way. 
copts: Analogous to cc_binary copts", "_PPC_GRTE = \"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\" # X86 ====================================================================== _X86_GRTE = \"//grte/v4_x86/release/usr/grte/v4\" #", "if service == \"grpc\": service_enable[\"grpc\"] = 1 elif service ==", "visibility = visibility, ) register_extension_info( extension_name = \"sc_cc_test\", label_regex_for_dep =", "+ _ARCH_DEFINES cc_binary( name = name, deps = sc_platform_filter( deps,", "# rename not available. \"done\", \"fi\", \"tar czf %s -h", "[\":%s\" % fileset_name], outs = outs, cmd = \"; \".join(cmds),", "= defs_plus, includes = sc_platform_filter(includes, [], arches), include_prefix = include_prefix,", "different options and some restrictions. The key difference: you can", "# Adds a suffix to a label, expanding implicit targets", "name = decorate(name, arch), deps = dep_set, srcs = gen_srcs,", "name = name, deps = sc_platform_filter(deps, [], arches), srcs =", "decorate(grpc_name, arch), deps = grpc_dep_set, srcs = grpc_gen_srcs, hdrs =", "= None, visibility = None): \"\"\"Public macro to package binaries", "temp_stem = \"%s/%s\" % (temp_prefix, src_stem) gen_stem = \"%s.%s\" %", "- something along the lines of augmenting context with a", "= name[:-6] + \"_grpc_proto\" grpc_dep_set = dep_set | [name] |", "% (\" && \".join(gen_cmds), new_hdr_loc)) shim_rule = decorate(name, \"shims\") native.genrule(", "None, copts = None, defines = None, includes = None,", "arch = arch, visibility = visibility, testonly = testonly, proto_include", "rule to run protoc ${src}.proto => ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule to", "label # Adds dquotes around a string. 
def dquote(s): return", "this library, but also exported for dependent rules to utilize.", "\"{extension_name}\", ) # Protobuf ================================================================= _SC_GRPC_DEPS = [ \"//sandblaze/prebuilt/grpc\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_base\",", "name, srcs, hdrs, deps, arch, visibility, testonly, proto_include, grpc_shim_rule): \"\"\"Creates", "Generates a blaze select(...) statement that can be used in", "arches), include_prefix = include_prefix, strip_include_prefix = strip_include_prefix, testonly = testonly,", "_SC_GRPC_PLUGIN else: grpc_plugin = _GRPC_PLUGIN protoc_deps = [] for dep", "gen_hdrs, visibility = visibility, ) if grpc_shim_rule: grpc_name = name[:-6]", "elif proto_include: full_proto_include = \"%s/%s\" % (native.package_name(), proto_include) if full_proto_include:", "\"//sandblaze/prebuilt/grpc:grpc++_codegen_base\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\", ] _PROTOC = \"@com_google_protobuf//:protobuf:protoc\" _PROTOBUF = \"@com_google_protobuf//:protobuf\" _SC_GRPC_PLUGIN", "* sc_cc_lib(name) with those as input. * ${name}_py a py_proto_library", "= x86, ), visibility = visibility, ) # Embedded build", "= [\"-I$(GENDIR)\", \"-I.\"] # Generate messages gen_pb_h = gen_stem +", "= depset(data or []) deps = depset(deps or []) if", "of extra work with these include flags to avoid generating", "= default or \"//stratum/portage:dummy\", host = host, ppc = ppc,", "host = host, ppc = ppc, x86 = x86, ),", "again then inject from cmdline else kill feature. _TRACE_SRCS =", "to cc_library deps argument. srcs: Analogous to cc_library srcs argument.", "= hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule], arches = [arch], copts", "blaze visibility of the generated alias. Returns: Name of shim", "or x86 == None): fail(\"Missing a select value for at", "= visibility, ) # Embedded build definitions. 
============================================== EMBEDDED_PPC =", "cc_test( name = name, size = size or \"small\", srcs", "cc_library. visibility: Standard blaze visibility parameter. xdeps: External (file) dependencies", "arches), defines = defs_plus, includes = sc_platform_filter(includes, [], arches), include_prefix", "deps: if dep.endswith(\"_proto\"): protoc_deps.append(\"%s_%s_headers\" % (dep, arch)) name_arch = decorate(name,", "= label[:-1] if \":\" in label: # .../bar:bat -> .../bar:bat_suffix", "$${PWD} until this step, because our rollup command # might", "data argument. defines: Analogous to cc_test defines argument. copts: Analogous", "= None): \"\"\"Creates rules for the given portable library and", "= hdrs + protoc_deps, visibility = visibility, ) my_proto_rollup =", "name = decorate(fileset_name, extension), out = decorate(name, extension), entries =", "master switch for sc_proto_lib. For each src path.proto, generates path.pb.h", "decorate(fileset_name, extension), out = decorate(name, extension), entries = [ native.FilesetEntry(", "= sc_platform_filter(includes, [], arches), include_prefix = include_prefix, strip_include_prefix = strip_include_prefix,", "bar_suffix return \"%s_%s\" % (label, suffix) # Creates a relative", "given portable library and arches. Args: name: Analogous to cc_library", "name, size = size or \"small\", srcs = sc_platform_select(host =", "srcs = depset(srcs or []) hdrs = depset(hdrs or [])", "Files to provide as data at runtime (host builds only).", "name = None, bins = None, data = None, deps", "arches), srcs = sc_platform_filter(srcs, [], arches), hdrs = sc_platform_filter(hdrs, [],", "[ native.FilesetEntry( srcdir = decorate(name, \"bin\"), destdir = \"bin\", ),", ") native.py_proto_library( name = py_name, api_version = 2, deps =", "this simple. 
For library \"name\", generates: * ${name}_default_pb, a regular", "gendir_include = [\"-I$${g3}/$(GENDIR)\", \"-I$${g3}\", \"-I.\"] else: proto_src_loc = \"%s/%s\" %", "= [\"-I$${g3}/$(GENDIR)\", \"-I$${g3}\", \"-I.\"] else: proto_src_loc = \"%s/%s\" % (native.package_name(),", "= decorate(name_arch, \"proto_rollup\"), srcs = proto_rollups, outs = [my_proto_rollup], cmd", "(\"%s/%s\" % (full_proto_include, proto_src_loc)), \"then cd %s\" % full_proto_include, \"else", "arches else None, visibility = visibility, ) return shim_rule def", "% (native.package_name(), src) if proto_src_loc.startswith(full_proto_include + \"/\"): proto_src_loc = proto_src_loc[len(full_proto_include)", ") def sc_package( name = None, bins = None, data", "[]) if not arches: arches = EMBEDDED_ARCHES fileset_name = decorate(name,", "\"\"\" regular_proto_name = decorate(name, \"default_pb\") py_name = decorate(name, \"py\") proto_library(", "srcs + hdrs: if src.endswith(\".proto\"): src_stem = src[0:-6] src_arch =", "= dep_set, srcs = gen_srcs, hdrs = hdrs + gen_hdrs,", "also have python support, and may not use the proto_include", "platform architecture. Args: name: Base name for this library. pb_modifier:", "hdrs + protoc_deps, visibility = visibility, ) my_proto_rollup = decorate(name_arch,", "more general than it may seem: genrule doesn't have hdrs", "visibility: Standard blaze visibility parameter, passed through to subsequent rules.", "% (full_proto_include, full_proto_include), ) accum_flags.append( \"$$(if [[ -e %s ]];", "proto_include, grpc_shim_rule = grpc_shim_rule, ) if python_support: if proto_include: fail(\"Cannot", "provided for every target platform. Args: host: The value to", "same name for each arch; # multiple such calls are", "argument. deps: Standard blaze deps argument. 
visibility: Standard blaze visibility", "= [\"rollup=$$(sed \\\"s,G3LOC,$${PWD},g\\\" %s)\" % _loc(my_proto_rollup)] proto_rollup_flags = [\"$${rollup}\"] if", "flag for flag in accum_flags] proto_rollup_cmds.append(\"cat $(SRCS)\") proto_rollup_cmd = \"{", "Invalid input paths will stay invalid. \"\"\" sep = \"/\"", "with same name for each arch; # multiple such calls", "for deployment. For package \"name\", generates: * ${name}_${arch}_bin and ${name}_${arch}_data", "_PROTOBUF = \"@com_google_protobuf//:protobuf\" _SC_GRPC_PLUGIN = \"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\" _GRPC_PLUGIN = \"//grpc:grpc_cpp_plugin\" def", "least one platform in \" + \"sc_platform_select. Please add.\") config_label_prefix", "dependent rules to utilize. deps: List of deps for this", "to cc_library srcs argument. hdrs: Analogous to cc_library hdrs argument.", "Unknown STRATUM_ARCH\", \"#endif\", ] gen_cmds = [(\"printf '%%s\\\\n' '%s'\" %", "visibility, ) my_proto_rollup = decorate(name_arch, \"proto_rollup.flags\") protoc_srcs_set = (srcs +", "architectures to generate this way. copts: Analogous to cc_library copts", "not generally supported). visibility: Standard blaze visibility parameter, passed through", "\"default_pb\") py_name = decorate(name, \"py\") proto_library( name = regular_proto_name, srcs", "fragment would be a much cleaner solution. Currently supported architectures:", "in arches else default, ) def sc_platform_alias( name, host =", "host = decorate(name, \"host\") if \"host\" in arches else None,", "//foo/bar:bar_suffix return \"%s:%s_%s\" % (label, label.split(\"/\")[-1], suffix) else: # bar", "Analogous to cc_binary copts argument. defines: Symbols added as \"-D\"", "so all embedded dependencies appear as a `src'. # TODO(unknown):", "Any sc_proto_lib with python support may only depend on sc_proto_libs", "deps for this library arch: Which architecture to build this", "blaze select(...) 
statement that can be used in most contexts", "sc_proto_lib's proto_include field, so we keep this simple. For library", "can use $(CC_FLAGS) instead of this. \"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\", ] # Used", "= decorate(name, \"default_pb\") py_name = decorate(name, \"py\") proto_library( name =", "= None, defines = None, includes = None, testonly =", "-e $(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s; fi)\" % (full_proto_include, full_proto_include),", "proto_rollups, outs = [my_proto_rollup], cmd = proto_rollup_cmd, visibility = visibility,", "\"/\" + hdr_stem + \".%s\" + hdr_ext) lines = [", "sc_cc_lib(name) with those as input. * ${name}_py a py_proto_library version", "), ] + platform_entries, visibility = visibility, ) outs =", "sc_platform_select( default = default or \"//stratum/portage:dummy\", host = host, ppc", "srcs = protoc_srcs_set, outs = pb_outs, tools = tools, cmd", "needed, the name of the grpc shim for this proto", "(native.package_name(), arch) accum_flags = [] full_proto_include = None if proto_include", "blaze rules based on the platform architecture. Generates a blaze", "(temp_prefix, src_stem) gen_stem = \"%s.%s\" % (src_stem, arch) # We", "command # might be generated on another forge server. proto_path_cmds", "= gen_stem + \".pb.cc\" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds = bash_vars +", "# X86 ====================================================================== _X86_GRTE = \"//grte/v4_x86/release/usr/grte/v4\" # Portability definitions ===================================================", "name, entries = [ native.FilesetEntry( srcdir = decorate(name, \"bin\"), destdir", "sc_platform_select( # We use a different ppc toolchain for Stratum.", "proto include '%s' doesn't match src %s\" % (full_proto_include, proto_src_loc))", "or relative path to normalize. Returns: A path equivalent to", "sc_platform_select( host = deps or [], default = [\"//stratum/portage:dummy_with_main\"], ),", "argument. 
\"\"\" regular_proto_name = decorate(name, \"default_pb\") py_name = decorate(name, \"py\")", "corresponding bin and data filesets, mapped to bin/ and share/", "grpc_gen_hdrs_plus + [grpc_shim_rule], arches = [arch], copts = [], includes", "None, visibility = None): \"\"\"Public macro to package binaries and", "Lines expand inside squotes, so quote accordingly. include_fmt = \"#include", "# //foo/bar -> //foo/bar:bar_suffix return \"%s:%s_%s\" % (label, label.split(\"/\")[-1], suffix)", "= native.package_name() elif proto_include: full_proto_include = \"%s/%s\" % (native.package_name(), proto_include)", "rollup command # might be generated on another forge server.", "includes = includes, testonly = testonly, textual_hdrs = grpc_gen_hdrs_plus, visibility", "= depset(deps or []) if not arches: arches = EMBEDDED_ARCHES", "public symbols are the macros: decorate(path) sc_cc_lib Declare a portable", "support, and may not use the proto_include field in this", "deps], visibility = visibility, testonly = testonly, ) native.py_proto_library( name", "see _gen_proto_shims, above. * ${name}_${arch}_pb protobuf compile rules - one", "import os is not allowed in build defs. For example", "are equivalent. \"\"\" if not arches: if testonly: arches =", "[protobuf_label] includes = [] if proto_include: includes = [proto_include] #", "full_proto_include: temp_prefix = \"%s/%s\" % (cpp_out, native.package_name()[len(full_proto_include):]) # We do", "full_proto_include), ) accum_flags.append( \"$$(if [[ -e %s ]]; then echo", "options for protoc: ${src}_${arch}_proto_rollup.flags Feed each set into sc_cc_lib to", "not arches: if testonly: arches = HOST_ARCHES else: arches =", "general than it may seem: genrule doesn't have hdrs or", "= [proto_include] # Note: Public sc_proto_lib invokes this once per", "testonly = testonly, ) for src in srcs + hdrs:", "disjoint. 
sc_cc_lib( name = decorate(name, arch), deps = dep_set, srcs", "\"%s/%s\" % (cpp_out, native.package_name()) proto_rollups = [ decorate(decorate(dep, arch), \"proto_rollup.flags\")", "for. visibility: Standard blaze visibility parameter, passed through to subsequent", "and data for deployment. For package \"name\", generates: * ${name}_${arch}_bin", "for program-sizing build # \"-Wl,--gc-sections,--no-wchar-size-warning\", # Use this for program-sizing", "architecture. Generates a blaze select(...) statement that can be used", "So \"rpc\" and \"grpc\" are equivalent. \"\"\" if not arches:", "they should be compiled - defaults to all if left", "or []) includes = depset(includes or []) data = depset(data", "\"mkdir -p %s\" % temp_prefix, ] + proto_path_cmds + [", ") # TODO(unknown): Add support for depending on normal proto_library", "library. srcs: List of proto files hdrs: More files to", "a path. Normalizes a path by removing unnecessary path-up segments", "% full_proto_include, \"else cd $(GENDIR)/%s\" % full_proto_include, \"fi\", ])) gendir_include", "x86 = value if \"x86\" in arches else default, )", "Args: name: Base name for this library. srcs: List of", "'%s'\" % line) for line in lines] new_hdr_loc = \"$(location", "assumed, used and exported as header, not for flags, libs,", "protoc = \"$${g3}/%s\" % _loc(protoc_label) grpc_plugin = \"$${g3}/%s\" % _loc(grpc_plugin)", "Analogous to cc_library srcs argument. hdrs: Analogous to cc_library hdrs", "to generate this way. copts: Analogous to cc_library copts argument.", "&& \".join(cmds) or \"true\", ) sc_platform_alias( name = name, host", "C++11 features _and_ GNU extensions. ] # Used for linking", "\"#if defined(STRATUM_ARCH_%s)\" % \"PPC\", include_fmt % \"ppc\", \"#elif defined(STRATUM_ARCH_%s)\" %", "prefix return None def sc_platform_select(host = None, ppc = None,", "and may not use the proto_include field in this rule.", "The value to use for ppc builds. 
x86: The value", "srcs = hdrs + protoc_deps, visibility = visibility, ) my_proto_rollup", "string. def dquote(s): return '\"' + s + '\"' #", "proto_include field, so we keep this simple. For library \"name\",", "continue hdr_stem = filename[0:-6] new_hdr_name = hdr_stem + hdr_ext outs.append(new_hdr_name)", "shim should support. visibility: The blaze visibility of the generated", "if not arches: if testonly: arches = HOST_ARCHES else: arches", "native.package_name()) proto_rollups = [ decorate(decorate(dep, arch), \"proto_rollup.flags\") for dep in", "strip_include_prefix argument. data: Files to provide as data at runtime", "its corresponding directories. Providing own implementation because import os is", "\"-std=gnu++11\", # Allow C++11 features _and_ GNU extensions. ] #", "binary and will not attempt to build any dependencies. Args:", "path-up segments. Invalid input paths will stay invalid. \"\"\" sep", "= \"$${g3}/%s\" % _loc(protoc_label) grpc_plugin = \"$${g3}/%s\" % _loc(grpc_plugin) cpp_out", "% (label, suffix) # Creates a relative filename from a", "_X86_GRTE = \"//grte/v4_x86/release/usr/grte/v4\" # Portability definitions =================================================== def sc_cc_test( name,", "this library arches: Which architectures to build this library for,", "= name, deps = sc_platform_filter( deps, [\"//stratum/portage:dummy_with_main\"], arches, ), srcs", "TODO(unknown): Switch protobuf to using the proto_include mechanism protoc_label =", "portability hacks. 
_ARCH_DEFINES = sc_platform_select( default = [\"STRATUM_ARCH_HOST\"], ppc =", "set of .proto files needed to # compile this proto.", "= \"%s:well_known_types_srcs\" % protobuf_label protobuf_srcs = [protobuf_hdrs] protobuf_include = \"$${g3}/protobuf/src\"", "= sc_platform_filter( deps, [\"//stratum/portage:dummy_with_main\"], arches, ), srcs = sc_platform_filter(srcs, [],", "grpc_gen_hdrs_plus, visibility = visibility, ) def _gen_proto_shims(name, pb_modifier, srcs, arches,", "def starts_with(s, prefix_list): for prefix in prefix_list: if s.startswith(prefix): return", "We'll be cd-ing to another directory before protoc, so #", "\"host\") if \"host\" in arches else None, ppc = decorate(name,", "+ \"_grpc_proto\" grpc_dep_set = dep_set | [name] | _SC_GRPC_DEPS grpc_gen_hdrs_plus", "can supply lists of architectures for which they should be", "+ gendir_include + proto_rollup_flags + [ \"-I%s\" % protobuf_include, \"--grpc-cpp_out=%s\"", "name, actual = sc_platform_select( default = default or \"//stratum/portage:dummy\", host", "parameter. visibility: Standard blaze visibility parameter. \"\"\" deps = depset(deps", "Internally, libraries and binaries are generated for every listed architecture.", "None, visibility = None, xdeps = None): \"\"\"Creates rules for", "protoc_srcs_set = (srcs + hdrs + protoc_deps + protobuf_srcs +", "the binaries and all of the data needed for this", "proto library from this rule. Any sc_proto_lib with python support", "and some restrictions. The key difference: you can supply lists", "testonly = testonly, ) # TODO(unknown): Add support for depending", "visibility parameter. 
xdeps: External (file) dependencies of this library -", "this filegroup to accumulate the set of .proto files needed", "is provided for a given platform, {default} is used instead.", "= [ \"//sandblaze/prebuilt/grpc\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_base\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\", ] _PROTOC = \"@com_google_protobuf//:protobuf:protoc\" _PROTOBUF", "data = None, deps = None, arches = None, visibility", "\"\"\"Creates rules and filegroups for embedded protobuf library. For every", "result: continue elif d == \"..\": if level > 0:", "_gen_proto_lib( name, srcs, hdrs, deps, arch, visibility, testonly, proto_include, grpc_shim_rule):", "\"@com_google_protobuf//:protobuf:protoc\" _PROTOBUF = \"@com_google_protobuf//:protobuf\" _SC_GRPC_PLUGIN = \"//sandblaze/prebuilt/protobuf:grpc_cpp_plugin\" _GRPC_PLUGIN = \"//grpc:grpc_cpp_plugin\"", "includes = depset(includes or []) data = depset(data or [])", "tools = grpc_tools, cmd = \" && \".join(cmds), heuristic_label_expansion =", "if grpc_shim_rule: grpc_name = name[:-6] + \"_grpc_proto\" grpc_dep_set = dep_set", "\"#include \" + dquote(pkg + \"/\" + hdr_stem + \".%s\"", "of arches this shim should support. visibility: The blaze visibility", "= includes, linkopts = [\"-ldl\", \"-lutil\"], testonly = testonly, visibility", "grpc_gen_srcs = [] tools = [protoc_label] grpc_tools = [protoc_label, grpc_plugin]", "= None, copts = None, defines = None, includes =", "+ \".pb.cc\" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds = bash_vars + [ \"mkdir", "${name}_default_pb, a regular proto library. * ${name}_py, a py_proto_library based", "extension_name = \"sc_cc_test\", label_regex_for_dep = \"{extension_name}\", ) def sc_cc_lib( name,", "sc_platform_select(host = None, ppc = None, x86 = None, default", "proto_include) if full_proto_include: temp_prefix = \"%s/%s\" % (cpp_out, native.package_name()[len(full_proto_include):]) #", ".tar.gz package. 
Args: name: Base name for this package. bins:", "filename = parse_label(src) if not filename.endswith(\".proto\"): continue hdr_stem = filename[0:-6]", "sc_proto_lib invokes this once per (listed) arch; # which then", "result = [] for d in path.split(sep): if d in", "filename from a label, replacing \"//\" and \":\". def _make_filename(label):", "suffix) else: # bar -> bar_suffix return \"%s_%s\" % (label,", "of the grpc shim for this proto lib. \"\"\" bash_vars", "path: A valid absolute or relative path to normalize. Returns:", "platform specific files to the final tarball. platform_entries = sc_platform_select(", "tarball. platform_entries = sc_platform_select( # We use a different ppc", "testonly: Standard blaze testonly parameter. proto_include: Path to add to", "Networking Foundation # SPDX-License-Identifier: Apache-2.0 \"\"\"A portable build system for", "ALL_ARCHES service_enable = { \"grpc\": 0, } for service in", "blaze srcs argument. deps: Standard blaze deps argument. visibility: Standard", "] HOST_ARCH = \"host\" HOST_ARCHES = [HOST_ARCH] ALL_ARCHES = EMBEDDED_ARCHES", "sc_proto_lib Declare a portable .proto Library. sc_cc_bin Declare a portable", "of \"x86\" arch. HOST_ARCH Name of default \"host\" arch. HOST_ARCHES", "of this library. Only generated if python_support == True. Args:", "testonly, proto_include = proto_include, grpc_shim_rule = grpc_shim_rule, ) if python_support:", "proto_include: fail(\"Cannot use proto_include on an sc_proto_lib with python support.\")", "testonly = testonly, ) register_extension_info( extension_name = \"sc_proto_lib\", label_regex_for_dep =", "linkopts = linkopts, visibility = visibility, ) register_extension_info( extension_name =", "{default} is used instead. A specific value or default must", "== None or ppc == None or x86 == None):", "name argument. size: Analogous to cc_test size argument. srcs: Analogous", "for src in srcs: pkg, filename = parse_label(src) if not", "of architectures to generate this way. 
copts: Analogous to cc_library", "blaze testonly parameter. visibility: Standard blaze visibility parameter. \"\"\" deps", "Analogous to cc_library hdrs argument. arches: List of architectures to", "arches: _gen_proto_lib( name = name, srcs = srcs, hdrs =", "protobuf_label = _PROTOBUF protobuf_hdrs = \"%s:well_known_types_srcs\" % protobuf_label protobuf_srcs =", "to run protoc ${src}.proto => ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule to run", "\"x86\" in arches else default, ) def sc_platform_alias( name, host", "PowerPC arch - \"ppc\". EMBEDDED_X86 Name of \"x86\" arch. HOST_ARCH", "-> google3_foo/bar/bat/baz return label.replace(\"//\", \"google3/\").replace(\":\", \"/\") elif label.startswith(\":\"): # :bat/baz", "proto_library(), and cc_binary(), but with different options and some restrictions.", "builds this target will generate a dummy binary and will", "\"google3/\").replace(\":\", \"/\") elif label.startswith(\":\"): # :bat/baz -> bat/baz return label[1:]", "using the proto_include mechanism protoc_label = _PROTOC protobuf_label = _PROTOBUF", "= [] tools = [protoc_label] grpc_tools = [protoc_label, grpc_plugin] protoc", "if dep.endswith(\"_proto\") ] proto_rollup_cmds = [\"printf '%%s\\n' %s\" % flag", "regular_proto_name = decorate(name, \"default_pb\") py_name = decorate(name, \"py\") proto_library( name", "field, so we keep this simple. For library \"name\", generates:", "for use in follow-on hdrs and/or src lists. \"\"\" outs", "or ppc == None or x86 == None): fail(\"Missing a", "name = regular_proto_name, srcs = srcs, deps = [decorate(dep, \"default_pb\")", "# # Build options for all embedded architectures # #", "arches. EMBEDDED_ARCHES All embedded arches. 
EMBEDDED_PPC Name of PowerPC arch", "= None, proto_include = None, python_support = False, services =", "= [\"%s.tar.gz\" % name] # Copy our files into a", "not in (grpc, rpc)\" % service) deps = depset(deps or", "services: List of services to enable {\"grpc\", \"rpc\"}; Only \"grpc\"", "> %s\" % (\" && \".join(gen_cmds), new_hdr_loc)) shim_rule = decorate(name,", "= [decorate(dep, \"default_pb\") for dep in deps], visibility = visibility,", "to subsequent rules. testonly: Standard blaze testonly parameter. proto_include: Include", "else None, ppc = decorate(name, \"ppc\") if \"ppc\" in arches", "b for b in bins.to_list()]), (\"data\", data), ]: native.Fileset( name", "on this rule. Typically \".\" python_support: Defaults to False. If", "name] # Copy our files into a temporary directory and", "[], arches), include_prefix = include_prefix, strip_include_prefix = strip_include_prefix, testonly =", "bat/baz -> bat/baz return label # Adds dquotes around a", "deps, visibility = visibility, testonly = testonly, ) register_extension_info( extension_name", "_ARCH_DEFINES textual_plus = textual_hdrs | depset(deps.to_list()) cc_library( name = name,", "name, size = None, srcs = None, deps = None,", "grpc_shim_rule = grpc_shim_rule, ) if python_support: if proto_include: fail(\"Cannot use", "elif service == \"rpc\": service_enable[\"grpc\"] = 1 else: fail(\"service='%s' not", "_EMBEDDED_CFLAGS = [ \"-I$(GENDIR)\", ] # Used for C++ compiler", "grpc_plugin, ] + gendir_include + proto_rollup_flags + [ \"-I%s\" %", "xdeps: External (file) dependencies of this library - no decorations", "visibility, testonly = testonly, proto_include = proto_include, grpc_shim_rule = grpc_shim_rule,", ".\" % _loc(name + \".tar.gz\"), \"rm -rf $${TEMP_DIR}\", ] native.genrule(", "sc_cc_bin rules to be packaged. 
data: List of files (and", "[], includes = includes, testonly = testonly, textual_hdrs = grpc_gen_hdrs_plus,", "= [] hdr_ext = pb_modifier + \".h\" for src in", "cmd = proto_rollup_cmd, visibility = visibility, testonly = testonly, )", "w/ erpc plugin: ${src}.proto => ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects include options", "src.endswith(\".proto\"): src_stem = src[0:-6] src_arch = \"%s_%s\" % (src_stem, arch)", "of: #ifdef logic to select path.${arch}.pb.h Also generates an alias", "We can't use $${PWD} until this step, because our rollup", "2018-present Open Networking Foundation # SPDX-License-Identifier: Apache-2.0 \"\"\"A portable build", "to cc_test data argument. defines: Analogous to cc_test defines argument.", "path & label helpers. ============================================ def _normpath(path): \"\"\"Normalize a path.", "proto target based on the currently selected platform architecture. Args:", "bins: List of sc_cc_bin rules to be packaged. data: List", "build system for Stratum P4 switch stack. To use this,", "cmds = [] hdr_ext = pb_modifier + \".h\" for src", "\"$(location %s)\" % target def _gen_proto_lib( name, srcs, hdrs, deps,", "None if (service_enable[\"grpc\"]): grpc_shim_rule = _gen_proto_shims( name = decorate(name[:-6], \"grpc_proto\"),", "py_proto_library version of this library. Only generated if python_support ==", "testonly, textual_hdrs = sc_platform_filter( textual_plus | xdeps, [], arches, ),", "fileset_name = decorate(name, \"fs\") for extension, inputs in [ (\"bin\",", "%s -h -C $${TEMP_DIR}/tarball .\" % _loc(name + \".tar.gz\"), \"rm", "service) deps = depset(deps or []) shim_rule = _gen_proto_shims( name", "our ppc # executables. ppc = [native.FilesetEntry( srcdir = \"%s:BUILD\"", "grpc_shim_rule: grpc_name = name[:-6] + \"_grpc_proto\" grpc_dep_set = dep_set |", "Library. sc_proto_lib Declare a portable .proto Library. 
sc_cc_bin Declare a", "you can supply lists of architectures for which they should", "# Generate GRPC if grpc_shim_rule: gen_grpc_pb_h = gen_stem + \".grpc.pb.h\"", ") dep_set = depset(deps) | [protobuf_label] includes = [] if", "+ hdr_stem + \".%s\" + hdr_ext) lines = [ \"#if", "(cpp_out, native.package_name()[len(full_proto_include):]) # We do a bit of extra work", "is selected. During embedded builds this target will generate a", "value to use for x86 builds. default: The value to", "blaze name argument. srcs: Standard blaze srcs argument. deps: Standard", "google3_foo/bar/bat/baz return label.replace(\"//\", \"google3/\").replace(\":\", \"/\") elif label.startswith(\":\"): # :bat/baz ->", "* ${name}_shim aka .pb.h master switch - see _gen_proto_shims, above.", "grpc_shim_rule, ) if python_support: if proto_include: fail(\"Cannot use proto_include on", "else default, x86 = value if \"x86\" in arches else", "files to the final tarball. platform_entries = sc_platform_select( # We", "x86: The value to use for x86 builds. default: The", "[] if proto_include: includes = [proto_include] # Note: Public sc_proto_lib", "if \":\" in label: # .../bar:bat -> .../bar:bat_suffix return \"%s_%s\"", "library for, None => EMBEDDED_ARCHES (HOST_ARCHES not generally supported). visibility:", "be a much cleaner solution. Currently supported architectures: ppc x86", "destdir = \"bin\", ), native.FilesetEntry( srcdir = decorate(name, \"data\"), destdir", "library for. visibility: Standard blaze visibility parameter, passed through to", "every listed architecture. The names are decorated to keep them", "cc_library( name = name, deps = sc_platform_filter(deps, [], arches), srcs", "location for constructing commands. Args: target: Blaze target name available", "Only \"grpc\" is supported. So \"rpc\" and \"grpc\" are equivalent.", "\".pb.h\" gen_pb_cc = gen_stem + \".pb.cc\" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds =", "to cc_library name argument. 
deps: Analogous to cc_library deps argument.", "a py_proto_library from the given srcs. There's no clean way", "allow all to be generated and addressed independently. This aspect", "= visibility, testonly = testonly, ) for src in srcs", "(\"\", \".\"): if result: continue elif d == \"..\": if", "decorate(name, extension), entries = [ native.FilesetEntry( files = inputs, ),", "hdrs, deps, arch, visibility, testonly, proto_include, grpc_shim_rule): \"\"\"Creates rules and", "name = name, srcs = srcs, hdrs = [shim_rule] +", "$${f%.stripped}\", # rename not available. \"done\", \"fi\", \"tar czf %s", ") STRATUM_INTERNAL = [ \"//stratum:__subpackages__\", ] # # Build options", "in arches else None, ppc = decorate(name, \"ppc\") if \"ppc\"", ") my_proto_rollup = decorate(name_arch, \"proto_rollup.flags\") protoc_srcs_set = (srcs + hdrs", "print(\"Invalid proto include '%s' doesn't match src %s\" % (full_proto_include,", "% (temp_stem, _loc(gen_pb_h)), \"cp %s.pb.cc %s\" % (temp_stem, _loc(gen_pb_cc)), ]", "specifying the symbols needed. The public symbols are the macros:", "value to use for host builds. ppc: The value to", "in EMBEDDED_ARCHES: grpc_plugin = _SC_GRPC_PLUGIN else: grpc_plugin = _GRPC_PLUGIN protoc_deps", "include_fmt % \"ppc\", \"#elif defined(STRATUM_ARCH_%s)\" % \"X86\", include_fmt % \"x86\",", "rule based on the target platform architecture. If no selection", "linkopts argument. visibility: Analogous to cc_test visibility argument. \"\"\" cc_test(", "deps argument. srcs: Analogous to cc_binary srcs argument. arches: List", "+ hdrs, deps = deps, arch = arch, visibility =", "to this package. 
arches: Which architectures to build this library", "proto_include mechanism protoc_label = _PROTOC protobuf_label = _PROTOBUF protobuf_hdrs =", "[\"%s.stripped\" % b for b in bins.to_list()]), (\"data\", data), ]:", "= value if \"ppc\" in arches else default, x86 =", "def sc_cc_test( name, size = None, srcs = None, deps", "arches: arches = EMBEDDED_ARCHES fileset_name = decorate(name, \"fs\") for extension,", "dep in deps.to_list() ], visibility = visibility, ) # Add", "ppc == None or x86 == None): fail(\"Missing a select", "# .../bar:bat -> .../bar:bat_suffix return \"%s_%s\" % (label, suffix) elif", "of deps for this library arch: Which architecture to build", "# Protobuf ================================================================= _SC_GRPC_DEPS = [ \"//sandblaze/prebuilt/grpc\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_base\", \"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib\", ]", "HOST_ARCH = \"host\" HOST_ARCHES = [HOST_ARCH] ALL_ARCHES = EMBEDDED_ARCHES +", "name: Base name for this package. bins: List of sc_cc_bin", "way to make python protos work with sc_proto_lib's proto_include field,", "visibility = visibility, testonly = testonly, proto_include = proto_include, grpc_shim_rule", "x86 = None, default = None, visibility = None): \"\"\"Public", "d in path.split(sep): if d in (\"\", \".\"): if result:", "Adds a suffix to a label, expanding implicit targets if", "= [(\"printf '%%s\\\\n' '%s'\" % line) for line in lines]", "[], ) native.Fileset( name = fileset_name, out = name, entries", "None or ppc == None or x86 == None): fail(\"Missing", "name: Analogous to cc_test name argument. size: Analogous to cc_test", "None, visibility = visibility, ) return shim_rule def _gen_py_proto_lib(name, srcs,", "None, ppc = None, x86 = None, default = None):", "${src}_${arch}_proto. Args: name: Base name for this library. srcs: List", "name for this library. 
srcs: List of proto files hdrs:", "\"grpc_proto\"), pb_modifier = \".grpc.pb\", srcs = srcs + hdrs, arches", "src_arch + \".grpc.pb\", srcs = protoc_srcs_set, outs = grpc_pb_outs, tools", "architectures # # Set _TRACE_SRCS to show sources in embedded", "SPDX-License-Identifier: Apache-2.0 \"\"\"A portable build system for Stratum P4 switch", "decorate(name, arch), deps = dep_set, srcs = gen_srcs, hdrs =", "[]) + _ARCH_DEFINES textual_plus = textual_hdrs | depset(deps.to_list()) cc_library( name", "List of arches this shim should support. visibility: The blaze", "sep.join(result) # Adds a suffix to a label, expanding implicit", ".proto files - private to this library. hdrs: As above,", "= [] cmds = [] hdr_ext = pb_modifier + \".h\"", "Declare a portable Binary. sc_package Declare a portable tarball package.", "changes # before tarballing. cmds = [ \"TEMP_DIR=$(@D)/stratum_packaging_temp\", \"mkdir $${TEMP_DIR}\",", "into a temporary directory and make any necessary changes #", "TODO(unknown): Add support for depending on normal proto_library rules. def", "Embedded build definitions. ============================================== EMBEDDED_PPC = \"ppc\" EMBEDDED_X86 = \"x86\"", "break the build when an embedded arch is selected. During", "+ \"/\"): proto_src_loc = proto_src_loc[len(full_proto_include) + 1:] else: print(\"Invalid proto", "platform architecture. Generates a blaze select(...) statement that can be", "if grpc_shim_rule: gen_grpc_pb_h = gen_stem + \".grpc.pb.h\" gen_grpc_pb_cc = gen_stem", "and arches. Args: name: Analogous to cc_library name argument. deps:", "an alias that changes based on target arch. Generates a", "of architectures to generate this way. 
copts: Analogous to cc_binary", "equivalent to the input path with minimal use of path-up", "used and exported as header, not for flags, libs, etc.", ".../bar:bat_suffix return \"%s_%s\" % (label, suffix) elif label.startswith(\"//\"): # //foo/bar", "[], arches), hdrs = sc_platform_filter(hdrs, [], arches), alwayslink = alwayslink,", "= [], arches = [], visibility = None, testonly =", "packaged. deps: List of other sc_packages to add to this", "None => ALL. visibility: Standard blaze visibility parameter, passed through", "proto. native.filegroup( name = decorate(name_arch, \"headers\"), srcs = hdrs +", "+ protoc_deps + protobuf_srcs + [my_proto_rollup]) gen_srcs = [] gen_hdrs", "= { \"grpc\": 0, } for service in services or", "elif d == \"..\": if level > 0: result.pop() level", "deps = depset(deps or []) if not arches: arches =", "= depset(deps) | [protobuf_label] includes = [] if proto_include: includes", "[[ -e $${TEMP_DIR}/tarball/bin ]]\", \"then for f in $${TEMP_DIR}/tarball/bin/*.stripped\", \"", "] + proto_path_cmds + [ \" \".join([ protoc, \"--plugin=protoc-gen-grpc-cpp=%s\" %", "visibility = visibility, ) return shim_rule def _gen_py_proto_lib(name, srcs, deps,", "support for depending on normal proto_library rules. def sc_proto_lib( name", "to all if left unstated. Internally, libraries and binaries are", "= None, visibility = None): \"\"\"Creates a cc_test rule that", "on ${name}_default_pb. Args: name: Standard blaze name argument. srcs: Standard", "+ \"ppc\": (ppc or default), config_label_prefix + \"x86\": (x86 or", "this for program-sizing build #-- \"-Os\", # Use this for", "in services or []: if service == \"grpc\": service_enable[\"grpc\"] =", "generate this way. copts: Analogous to cc_binary copts argument. defines:", "proto_include = proto_include, grpc_shim_rule = grpc_shim_rule, ) if python_support: if", "czf %s -h -C $${TEMP_DIR}/tarball .\" % _loc(name + \".tar.gz\"),", "macros: decorate(path) sc_cc_lib Declare a portable Library. 
sc_proto_lib Declare a", "list of arches. def sc_platform_filter(value, default, arches): return sc_platform_select( host", "Used for C++ compiler invocations. _EMBEDDED_CXXFLAGS = [ \"-std=gnu++11\", #", "Returns: $(location target) \"\"\" return \"$(location %s)\" % target def", "defines = None, includes = None, testonly = None, visibility", "# Adds dquotes around a string. def dquote(s): return '\"'", "pb_modifier = \".grpc.pb\", srcs = srcs + hdrs, arches =", "\"data\"), destdir = \"share\", ), ] + platform_entries, visibility =", "a portable tarball package. and the variables/lists: ALL_ARCHES All known", "% (temp_stem, _loc(gen_grpc_pb_cc)), ] grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule( name", "share/ respectively. * ${name}_${arch}_tarball rule builds that .tar.gz package. Args:", "that .tar.gz package. Args: name: Base name for this package.", "copts, linkopts = linkopts, visibility = visibility, ) register_extension_info( extension_name", "${name}_py a py_proto_library version of this library. Only generated if", "sc_platform_select( host = value if \"host\" in arches else default,", "support.\") _gen_py_proto_lib( name = name, srcs = depset(srcs + hdrs),", "= outs, cmd = \"; \".join(cmds), visibility = visibility, )", "[]) data = depset(data or []) deps = depset(deps or", "are generated for every listed architecture. The names are decorated", "% (cpp_out, native.package_name()) proto_rollups = [ decorate(decorate(dep, arch), \"proto_rollup.flags\") for", "might be in GENDIR! proto_path_cmds.append(\"; \".join([ \"if [[ -e %s", "configuration fragment would be a much cleaner solution. Currently supported", "Used for C and C++ compiler invocations. _EMBEDDED_CFLAGS = [", "full_proto_include = \"%s/%s\" % (native.package_name(), proto_include) if full_proto_include: temp_prefix =", "\"-D\" compilation options. includes: Paths to add as \"-I\" compilation", "use a different ppc toolchain for Stratum. 
# This means", "# Use this for program-sizing build # \"-Wl,--gc-sections,--no-wchar-size-warning\", # Use", "Name of PowerPC arch - \"ppc\". EMBEDDED_X86 Name of \"x86\"", "to alter blaze rules based on the platform architecture. Generates", "STRATUM_ARCH\", \"#endif\", ] gen_cmds = [(\"printf '%%s\\\\n' '%s'\" % line)", "'%%s\\\\n' '%s'\" % line) for line in lines] new_hdr_loc =", "\".join(gen_cmds), new_hdr_loc)) shim_rule = decorate(name, \"shims\") native.genrule( name = shim_rule,", "Standard blaze srcs argument. deps: Standard blaze deps argument. visibility:", "proto_path_cmds + [ \" \".join([ protoc, \"--plugin=protoc-gen-grpc-cpp=%s\" % grpc_plugin, ]", "forge server. proto_path_cmds = [\"rollup=$$(sed \\\"s,G3LOC,$${PWD},g\\\" %s)\" % _loc(my_proto_rollup)] proto_rollup_flags", "the data needed for this package and all dependency packages.", "for C++ compiler invocations. _EMBEDDED_CXXFLAGS = [ \"-std=gnu++11\", # Allow", "% (dep, arch)) name_arch = decorate(name, arch) # We use", "services to enable {\"grpc\", \"rpc\"}; Only \"grpc\" is supported. So", "[]) deps = depset(deps or []) if not arches: arches", "We'll use tcmalloc \"-fno-builtin-calloc\", \"-fno-builtin-realloc\", \"-fno-builtin-free\", \"-D__STDC_FORMAT_MACROS=1\", # TODO(unknown): Figure", "attempt to build any dependencies. Args: name: Analogous to cc_test", "gen_stem + \".grpc.pb.cc\" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds = bash_vars + [", "(\"data\", data), ]: native.Fileset( name = decorate(fileset_name, extension), out =", "= \"{ %s; } | sort -u -o $(@)\" %", "gendir_include + proto_rollup_flags + [ \"-I%s\" % protobuf_include, \"--cpp_out=%s\" %", "library - no decorations assumed, used and exported as header,", "of arches. def sc_platform_filter(value, default, arches): return sc_platform_select( host =", "= proto_src_loc[len(full_proto_include) + 1:] else: print(\"Invalid proto include '%s' doesn't", "invalid. 
\"\"\" sep = \"/\" level = 0 result =", "grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs sc_cc_lib( name = decorate(grpc_name, arch),", "aspect of the system is suboptimal - something along the", "load(\"@rules_cc//cc:defs.bzl\", \"cc_binary\", \"cc_library\", \"cc_test\") # Generic path & label helpers.", "depset(data or []) textual_hdrs = depset(textual_hdrs or []) if srcs:", "= grpc_gen_srcs, hdrs = hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule], arches", "# Identify Stratum platform arch for .pb.h shims and other", "None): \"\"\"Creates a cc_test rule that interacts safely with Stratum", "-e $${TEMP_DIR}/tarball/bin ]]\", \"then for f in $${TEMP_DIR}/tarball/bin/*.stripped\", \" do", "decorate(name[:-6], \"grpc_proto\"), pb_modifier = \".grpc.pb\", srcs = srcs + hdrs,", "line) for line in lines] new_hdr_loc = \"$(location %s)\" %", "A valid absolute or relative path to normalize. Returns: A", "def decorate(label, suffix): if label.endswith(\":\"): # .../bar: -> .../bar label", "[]) textual_hdrs = depset(textual_hdrs or []) if srcs: if [s", "\"\"\" native.alias( name = name, actual = sc_platform_select( default =", "\"%s_%s\" % (src_stem, arch) temp_stem = \"%s/%s\" % (temp_prefix, src_stem)", "copts = None, linkopts = None, visibility = None): \"\"\"Creates", "to cc_binary srcs argument. arches: List of architectures to generate", "bat/baz return label # Adds dquotes around a string. def", "name, host = None, ppc = None, x86 = None,", "\"//stratum/portage:dummy\", host = host, ppc = ppc, x86 = x86,", "to be packaged. deps: List of other sc_packages to add", "echo -IG3LOC/%s; fi)\" % (full_proto_include, full_proto_include), ) else: temp_prefix =", "proto library. * ${name}_py, a py_proto_library based on ${name}_default_pb. Args:", "============================================ def _normpath(path): \"\"\"Normalize a path. Normalizes a path by", "Apache-2.0 \"\"\"A portable build system for Stratum P4 switch stack.", "from Message protobuf(s). 
For library \"name\", generates: * ${name}_shim aka", "[shim_rule] + hdrs, deps = deps, arch = arch, visibility", "\"host\" arch. HOST_ARCHES All host arches. STRATUM_INTERNAL For declaring Stratum", "= 0 result = [] for d in path.split(sep): if", "= \"$(location %s)\" % new_hdr_name cmds.append(\"{ %s; } > %s\"", "new_hdr_loc)) shim_rule = decorate(name, \"shims\") native.genrule( name = shim_rule, srcs", "grpc_dep_set, srcs = grpc_gen_srcs, hdrs = hdrs + grpc_gen_hdrs_plus +", "grpc_gen_srcs, hdrs = hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule], arches =", "(dep, arch)) name_arch = decorate(name, arch) # We use this", "deps = sc_platform_select( host = deps or [], default =", "or []) data = depset(data or []) textual_hdrs = depset(textual_hdrs", "a portable .proto Library. sc_cc_bin Declare a portable Binary. sc_package", "available. \"done\", \"fi\", \"tar czf %s -h -C $${TEMP_DIR}/tarball .\"", "srcs, deps = [decorate(dep, \"default_pb\") for dep in deps], visibility", "testonly, ) # TODO(unknown): Add support for depending on normal", "decorate(name_arch, \"proto_rollup\"), srcs = proto_rollups, outs = [my_proto_rollup], cmd =", "depend on this rule. Typically \".\" python_support: Defaults to False.", "_EMBEDDED_FLAGS = [ \"-O0\", # Don't use this for program-sizing", "textual_hdrs | depset(deps.to_list()) cc_library( name = name, deps = sc_platform_filter(deps,", "P4 switch stack. To use this, load() this file in", "\"fi\", \"tar czf %s -h -C $${TEMP_DIR}/tarball .\" % _loc(name", "appear as a `src'. # TODO(unknown): if useful again then", "gen_srcs, hdrs = hdrs + gen_hdrs, arches = [arch], copts", "= decorate(name, \"x86\") if \"x86\" in arches else None, visibility", "to build into this library, but also exported for dependent", "visibility, ) dep_set = depset(deps) | [protobuf_label] includes = []", "] + proto_path_cmds + [ \" \".join([protoc] + gendir_include +", "this library. 
pb_modifier: protoc plugin-dependent file extension (e.g.: .pb) srcs:", "= proto_rollup_cmd, visibility = visibility, testonly = testonly, ) for", "(native.package_name(), src) proto_path_cmds.append(\"[[ -e %s ]] || cd $(GENDIR)\" %", "= (defines or []) + _ARCH_DEFINES cc_binary( name = name,", "parameter. proto_include: Path to add to include path. This will", "= arches, visibility = visibility, ) for arch in arches:", "platform, {default} is used instead. A specific value or default", "name of the alias target. host: The result of the", "continue elif d == \"..\": if level > 0: result.pop()", "to cc_test deps argument. data: Analogous to cc_test data argument.", "for line in lines] new_hdr_loc = \"$(location %s)\" % new_hdr_name", "rules for the given portable binary and arches. Args: name:", "if \"ppc\" in arches else None, x86 = decorate(name, \"x86\")", "+ HOST_ARCHES # Identify Stratum platform arch for .pb.h shims", "= arches, visibility = visibility, ) grpc_shim_rule = None if", "), native.FilesetEntry( srcdir = decorate(name, \"data\"), destdir = \"share\", ),", "testonly = testonly, ) native.py_proto_library( name = py_name, api_version =", "appropriate target. If no selection is provided for a given", "\"small\", srcs = sc_platform_select(host = srcs or [], default =", "load() this file in a BUILD file, specifying the symbols", "to use for any of {host,ppc,x86} that isn't specified. Returns:", "[] gen_hdrs = [] grpc_gen_hdrs = [] grpc_gen_srcs = []", "given platform, {default} is used instead. A specific value or", "\"%s/%s\" % (temp_prefix, src_stem) gen_stem = \"%s.%s\" % (src_stem, arch)", "hdr_ext = pb_modifier + \".h\" for src in srcs: pkg,", "selected platform architecture. Args: name: Base name for this library.", "For package \"name\", generates: * ${name}_${arch}_bin and ${name}_${arch}_data filesets containing", "return label # Adds dquotes around a string. 
def dquote(s):", "\"proto_rollup\"), srcs = proto_rollups, outs = [my_proto_rollup], cmd = proto_rollup_cmd,", "= \"{extension_name}\", ) def sc_cc_bin( name, deps = None, srcs", "HOST_ARCHES # Identify Stratum platform arch for .pb.h shims and", "all if left unstated. Internally, libraries and binaries are generated", "sc_platform_filter(value, default, arches): return sc_platform_select( host = value if \"host\"", "much cleaner solution. Currently supported architectures: ppc x86 \"\"\" load(\"//tools/build_defs/label:def.bzl\",", "not attempt to build any dependencies. Args: name: Analogous to", "testonly, ) for src in srcs + hdrs: if src.endswith(\".proto\"):", "hdrs, deps = deps, arch = arch, visibility = visibility,", "of .proto files needed to # compile this proto. native.filegroup(", "for host builds. ppc: The value to use for ppc", "hdrs = sc_platform_filter(hdrs, [], arches), alwayslink = alwayslink, copts =", "//foo/bar:bat/baz -> google3_foo/bar/bat/baz return label.replace(\"//\", \"google3/\").replace(\":\", \"/\") elif label.startswith(\":\"): #", "= None, x86 = None, default = None, visibility =", "\"x86\": (x86 or default), }) # Generates an sc_platform_select based", ") for arch in arches: _gen_proto_lib( name = name, srcs", "The value to use for host builds. ppc: The value", "EMBEDDED_ARCHES All embedded arches. EMBEDDED_PPC Name of PowerPC arch -", "linking binaries. _EMBEDDED_LDFLAGS = [ # \"-static\", # Use this", "once per (listed) arch; # which then calls sc_cc_lib with", "\"bin\", ), native.FilesetEntry( srcdir = decorate(name, \"data\"), destdir = \"share\",", "and/or src lists. \"\"\" outs = [] cmds = []", "config_label_prefix + \"ppc\": (ppc or default), config_label_prefix + \"x86\": (x86", "subsequent rules. testonly: Standard blaze testonly parameter. proto_include: Include path", "For every given ${src}.proto, generate: :${src}_${arch}.pb rule to run protoc", "rules for the given portable library and arches. 
Args: name:", "[]) copts = depset(copts or []) includes = depset(includes or", "+ gendir_include + proto_rollup_flags + [ \"-I%s\" % protobuf_include, \"--cpp_out=%s\"", "srcs: Standard blaze srcs argument. deps: Standard blaze deps argument.", "\"-I$(GENDIR)\", ] # Used for C++ compiler invocations. _EMBEDDED_CXXFLAGS =", "specified. visibility: The visibility of the alias target. \"\"\" native.alias(", "like cc_library(), proto_library(), and cc_binary(), but with different options and", "\".join(cmds), heuristic_label_expansion = 0, visibility = visibility, ) dep_set =", "from this rule. Any sc_proto_lib with python support may only", "supported architectures: ppc x86 \"\"\" load(\"//tools/build_defs/label:def.bzl\", \"parse_label\") load( \"//devtools/build_cleaner/skylark:build_defs.bzl\", \"register_extension_info\",", "x86 \"\"\" load(\"//tools/build_defs/label:def.bzl\", \"parse_label\") load( \"//devtools/build_cleaner/skylark:build_defs.bzl\", \"register_extension_info\", ) load(\"@rules_proto//proto:defs.bzl\", \"proto_library\")", "# PPC ====================================================================== _PPC_GRTE = \"//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot\" # X86 ====================================================================== _X86_GRTE", "used instead. Args: name: The name of the alias target.", "simple. For library \"name\", generates: * ${name}_default_pb, a regular proto", "input path with minimal use of path-up segments. Invalid input", "(ppc or default), config_label_prefix + \"x86\": (x86 or default), })", "this library. srcs: List of proto files hdrs: More files", "include_prefix = include_prefix, strip_include_prefix = strip_include_prefix, testonly = testonly, textual_hdrs", "path. 
This will affect the symbols generated by protoc, as", "decorate(name, \"tarball\"), srcs = [\":%s\" % fileset_name], outs = outs,", "gen_hdrs, arches = [arch], copts = [], includes = includes,", "= depset(xdeps or []) copts = depset(copts or []) includes", "service_enable[\"grpc\"] = 1 else: fail(\"service='%s' not in (grpc, rpc)\" %", "None, testonly = None, visibility = None): \"\"\"Creates rules for", "build any dependencies. Args: name: Analogous to cc_test name argument.", "#-- \"-Os\", # Use this for program-sizing build \"-g\", #", "file extension (e.g.: .pb) srcs: List of proto files. arches:", "name, deps = sc_platform_filter( deps, [\"//stratum/portage:dummy_with_main\"], arches, ), srcs =", "data = data or [], defines = defines, copts =", "=> ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule to run protoc w/ erpc plugin:", "${name}_shim aka .pb.h master switch - see _gen_proto_shims, above. *", "= name, pb_modifier = \".pb\", srcs = srcs + hdrs,", "as the arches are disjoint. sc_cc_lib( name = decorate(name, arch),", "= name, entries = [ native.FilesetEntry( srcdir = decorate(name, \"bin\"),", "to utilize. deps: List of deps for this library arches:", "target platform. Args: host: The value to use for host", "[proto_include] # Note: Public sc_proto_lib invokes this once per (listed)", "selection is provided for a given platform and no default", "[] full_proto_include = None if proto_include == \".\": full_proto_include =", "= \"%s.%s\" % (src_stem, arch) # We can't use $${PWD}", "\"-I%s\" % protobuf_include, \"--grpc-cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd $${g3}\",", "testonly = None, visibility = None): \"\"\"Creates rules for the", "grpc_plugin = _SC_GRPC_PLUGIN else: grpc_plugin = _GRPC_PLUGIN protoc_deps = []", "extension_name = \"sc_proto_lib\", label_regex_for_dep = \"{extension_name}\", ) def sc_package( name", "\".pb\", srcs = protoc_srcs_set, outs = pb_outs, tools = tools,", "instead. Args: name: The name of the alias target. 
host:", "for this proto lib. \"\"\" bash_vars = [\"g3=$${PWD}\"] # TODO(unknown):", "(srcs + hdrs + protoc_deps + protobuf_srcs + [my_proto_rollup]) gen_srcs", "include_fmt = \"#include \" + dquote(pkg + \"/\" + hdr_stem", "visibility: Analogous to cc_test visibility argument. \"\"\" cc_test( name =", "the name of the grpc shim for this proto lib.", "protobuf_label protobuf_srcs = [protobuf_hdrs] protobuf_include = \"$${g3}/protobuf/src\" if arch in", "use for any of {host,ppc,x86} that isn't specified. Returns: The", "given platform and no default is set, a dummy default", "+ s + \"'\" # Emulate Python 2.5+ str(startswith([prefix ...])", "[gen_pb_h, gen_pb_cc] native.genrule( name = src_arch + \".pb\", srcs =", "arches. Args: name: Analogous to cc_binary name argument. deps: Analogous", "textual_hdrs = gen_hdrs, visibility = visibility, ) if grpc_shim_rule: grpc_name", "that isn't specified. Returns: The requested selector. \"\"\" if default", "to this build. Returns: $(location target) \"\"\" return \"$(location %s)\"", "bin/ and share/ respectively. * ${name}_${arch}_tarball rule builds that .tar.gz", "\"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\", ] # Used for C and C++ compiler invocations.", "%s\" % (\" && \".join(gen_cmds), new_hdr_loc)) shim_rule = decorate(name, \"shims\")", "for program-sizing build \"-Wall\", \"-Werror\", # Warn lots, and force", "alwayslink = alwayslink, copts = sc_platform_filter(copts, [], arches), defines =", "= decorate(name, arch) # We use this filegroup to accumulate", "+ \".pb.h\" gen_pb_cc = gen_stem + \".pb.cc\" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds", "Which architectures to build this library for, None => ALL.", "= testonly, proto_include = proto_include, grpc_shim_rule = grpc_shim_rule, ) if", "on ${src}_${arch}_proto. Args: name: Base name for this library. 
srcs:", "def sc_proto_lib( name = None, srcs = [], hdrs =", "[] cmds = [] hdr_ext = pb_modifier + \".h\" for", "blaze visibility parameter, passed through to all filesets. \"\"\" bins", ".../bar:bat -> .../bar:bat_suffix return \"%s_%s\" % (label, suffix) elif label.startswith(\"//\"):", "name: Base name for this library. pb_modifier: protoc plugin-dependent file", "and make any necessary changes # before tarballing. cmds =", "alias target. \"\"\" native.alias( name = name, actual = sc_platform_select(", "portable binary and arches. Args: name: Analogous to cc_binary name", "default = [\"//stratum/portage:dummy_with_main\"], ), data = data or [], defines", "extension (e.g.: .pb) srcs: List of proto files. arches: List", "of sc_cc_bin rules to be packaged. data: List of files", "= \"ppc\" EMBEDDED_X86 = \"x86\" EMBEDDED_ARCHES = [ EMBEDDED_PPC, EMBEDDED_X86,", "runtime (host builds only). testonly: Standard blaze testonly parameter. textual_hdrs:", "needed. def decorate(label, suffix): if label.endswith(\":\"): # .../bar: -> .../bar", "visibility = None): \"\"\"Creates rules for the given portable binary", "deps or [], default = [\"//stratum/portage:dummy_with_main\"], ), data = data", "a cc_test rule that doesn't break the build when an", "multi-arch library from Message protobuf(s). For library \"name\", generates: *", "build .pb.h multi-arch master switch for sc_proto_lib. For each src", "native.genrule( name = decorate(name, \"tarball\"), srcs = [\":%s\" % fileset_name],", "% proto_src_loc) gendir_include = [\"-I$(GENDIR)\", \"-I.\"] # Generate messages gen_pb_h", "= deps, arch = arch, visibility = visibility, testonly =", "temporary directory and make any necessary changes # before tarballing.", "= inputs, ), ] + [ native.FilesetEntry(srcdir = decorate(dep, extension))", "copts argument. defines: Symbols added as \"-D\" compilation options. includes:", "The requested selector. 
\"\"\" if default == None and (host", "at least one platform in \" + \"sc_platform_select. Please add.\")", "List of services to enable {\"grpc\", \"rpc\"}; Only \"grpc\" is", "\"\"\" outs = [] cmds = [] hdr_ext = pb_modifier", "cmds.append(\"{ %s; } > %s\" % (\" && \".join(gen_cmds), new_hdr_loc))", "* ${name}_${arch}_tarball rule builds that .tar.gz package. Args: name: Base", "in embedded sc_cc_lib compile steps. # This is more general", "visibility parameter, passed through to subsequent rules. testonly: Standard blaze", "\"-I%s\" % protobuf_include, \"--cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd $${g3}\",", "blaze. \"-fno-builtin-malloc\", # We'll use tcmalloc \"-fno-builtin-calloc\", \"-fno-builtin-realloc\", \"-fno-builtin-free\", \"-D__STDC_FORMAT_MACROS=1\",", "] grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule( name = src_arch +", "for this library arch: Which architecture to build this library", "dep_set = depset(deps) | [protobuf_label] includes = [] if proto_include:", "deps = depset(deps or []) shim_rule = _gen_proto_shims( name =", "based on the target platform architecture. 
If no selection is", "visibility = None, xdeps = None): \"\"\"Creates rules for the", "\"\"\" deps = depset(deps or []) srcs = depset(srcs or", "symlinks = \"dereference\", )], default = [], ) native.Fileset( name", "GRPC if grpc_shim_rule: gen_grpc_pb_h = gen_stem + \".grpc.pb.h\" gen_grpc_pb_cc =", "x86 = [\"STRATUM_ARCH_X86\"], ) STRATUM_INTERNAL = [ \"//stratum:__subpackages__\", ] #", "them different and allow all to be generated and addressed", "= \"{extension_name}\", ) def sc_cc_lib( name, deps = None, srcs", "level = 0 result = [] for d in path.split(sep):", "of services to enable {\"grpc\", \"rpc\"}; Only \"grpc\" is supported.", "depset(bins or []) data = depset(data or []) deps =", "= None, testonly = None, visibility = None): \"\"\"Creates rules", "\"\"\" load(\"//tools/build_defs/label:def.bzl\", \"parse_label\") load( \"//devtools/build_cleaner/skylark:build_defs.bzl\", \"register_extension_info\", ) load(\"@rules_proto//proto:defs.bzl\", \"proto_library\") load(\"@rules_cc//cc:defs.bzl\",", "based on the platform architecture. Generates a blaze select(...) statement", "protoc_srcs_set, outs = grpc_pb_outs, tools = grpc_tools, cmd = \"", "-IG3LOC/%s; fi)\" % (full_proto_include, full_proto_include), ) else: temp_prefix = \"%s/%s\"", "] + gendir_include + proto_rollup_flags + [ \"-I%s\" % protobuf_include,", "cc_library name argument. deps: Analogous to cc_library deps argument. 
srcs:", "path.split(sep): if d in (\"\", \".\"): if result: continue elif", "may only depend on sc_proto_libs that also have python support,", "srcdir = decorate(name, \"data\"), destdir = \"share\", ), ] +", "\"%s/%s\" % (native.package_name(), src) if proto_src_loc.startswith(full_proto_include + \"/\"): proto_src_loc =", "bins = depset(bins or []) data = depset(data or [])", "if (service_enable[\"grpc\"]): grpc_shim_rule = _gen_proto_shims( name = decorate(name[:-6], \"grpc_proto\"), pb_modifier", "testonly = testonly, textual_hdrs = sc_platform_filter( textual_plus | xdeps, [],", "[]: if service == \"grpc\": service_enable[\"grpc\"] = 1 elif service", "alter blaze rules based on the platform architecture. Generates a", "include_prefix, strip_include_prefix = strip_include_prefix, testonly = testonly, textual_hdrs = sc_platform_filter(", "$${TEMP_DIR}\", \"cp -r %s $${TEMP_DIR}/tarball\" % _loc(fileset_name), \"if [[ -e", "EMBEDDED_X86 Name of \"x86\" arch. HOST_ARCH Name of default \"host\"", "\"#error Unknown STRATUM_ARCH\", \"#endif\", ] gen_cmds = [(\"printf '%%s\\\\n' '%s'\"", "around a string. def squote(s): return \"'\" + s +", "for dep in deps.to_list() ], visibility = visibility, ) #", "ppc = [native.FilesetEntry( srcdir = \"%s:BUILD\" % _PPC_GRTE, files =", "changes based on target arch. Generates a blaze alias that", "visibility, testonly = testonly, ) native.py_proto_library( name = py_name, api_version", "\"-lutil\"], testonly = testonly, visibility = visibility, ) register_extension_info( extension_name", "that changes based on target arch. Generates a blaze alias", "cc_test defines argument. copts: Analogous to cc_test copts argument. linkopts:", "the variables/lists: ALL_ARCHES All known arches. EMBEDDED_ARCHES All embedded arches.", "ppc = [\"STRATUM_ARCH_PPC\"], x86 = [\"STRATUM_ARCH_X86\"], ) STRATUM_INTERNAL = [", "x86 builds. 
default: The value to use for any of", "= [protoc_label] grpc_tools = [protoc_label, grpc_plugin] protoc = \"$${g3}/%s\" %", "the symbols generated by protoc, as well as the include", "1 if not arches: arches = ALL_ARCHES defs_plus = (defines", "have python support, and may not use the proto_include field", "_loc(gen_pb_h)), \"cp %s.pb.cc %s\" % (temp_stem, _loc(gen_pb_cc)), ] pb_outs =", "# Use this for program-sizing build \"-g\", # Don't use", "proto_src_loc)) # By cd-ing to another directory, we force protoc", "= None, visibility = None, xdeps = None): \"\"\"Creates rules", "visibility = None): \"\"\"Public macro to package binaries and data", "adjust our .proto path accordingly. proto_src_loc = \"%s/%s\" % (native.package_name(),", "proto_src_loc)), \"then cd %s\" % full_proto_include, \"else cd $(GENDIR)/%s\" %", "= 0, visibility = visibility, ) dep_set = depset(deps) |", "\"HOST\", include_fmt % \"host\", \"#else\", \"#error Unknown STRATUM_ARCH\", \"#endif\", ]", "srcs.to_list() if not s.endswith(\".h\")]: alwayslink = 1 if not arches:", "to cc_library copts argument. defines: Symbols added as \"-D\" compilation", "for this library. srcs: List of .proto files - private", "constructing commands. Args: target: Blaze target name available to this", "), data = data or [], defines = defines, copts", "visibility, ) for arch in arches: _gen_proto_lib( name = name,", "replacing \"//\" and \":\". def _make_filename(label): if label.startswith(\"//\"): # //foo/bar:bat/baz", "visibility argument. \"\"\" cc_test( name = name, size = size", "as header, not for flags, libs, etc. \"\"\" alwayslink =", "gcc invocations. _EMBEDDED_FLAGS = [ \"-O0\", # Don't use this", "TODO(unknown): Figure out how we can use $(CC_FLAGS) instead of", "proto_src_loc) gendir_include = [\"-I$(GENDIR)\", \"-I.\"] # Generate messages gen_pb_h =", "= None, data = None, deps = None, arches =", "Include path for generated sc_cc_libs. 
grpc_shim_rule: If needed, the name", "= decorate(name, \"py\") proto_library( name = regular_proto_name, srcs = srcs,", "(label, suffix) elif label.startswith(\"//\"): # //foo/bar -> //foo/bar:bar_suffix return \"%s:%s_%s\"", "srcs argument. deps: Analogous to cc_test deps argument. data: Analogous", "= sc_platform_filter(srcs, [], arches), copts = copts, defines = defs_plus,", "cc_binary copts argument. defines: Symbols added as \"-D\" compilation options.", "be compiled - defaults to all if left unstated. Internally,", "protoc ${src}.proto => ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule to run protoc w/", "gendir_include + proto_rollup_flags + [ \"-I%s\" % protobuf_include, \"--grpc-cpp_out=%s\" %", "testonly, textual_hdrs = gen_hdrs, visibility = visibility, ) if grpc_shim_rule:", "= [arch], copts = [], includes = includes, testonly =", "which then calls sc_cc_lib with same name for each arch;", "depends on ${src}_${arch}_proto. Args: name: Base name for this library.", "= gen_hdrs, visibility = visibility, ) if grpc_shim_rule: grpc_name =", "label[1:] else: # bat/baz -> bat/baz return label # Adds", "generated by protoc, as well as the include paths used", "user defined configuration fragment would be a much cleaner solution.", "cc_library deps argument. srcs: Analogous to cc_library srcs argument. hdrs:", "architectures for which they should be compiled - defaults to", "${name}_${arch} fileset containing the corresponding bin and data filesets, mapped", "if proto_include: # We'll be cd-ing to another directory before", "= \"$${g3}/%s\" % _loc(grpc_plugin) cpp_out = \"$${g3}/$(GENDIR)/%s/%s\" % (native.package_name(), arch)", "\"$$(if [[ -e %s ]]; then echo -IG3LOC/%s; fi)\" %", "\"proto_library\") load(\"@rules_cc//cc:defs.bzl\", \"cc_binary\", \"cc_library\", \"cc_test\") # Generic path & label", "blaze testonly parameter. proto_include: Path to add to include path.", "of the alias for host builds. 
ppc: The result of", "= testonly, textual_hdrs = grpc_gen_hdrs_plus, visibility = visibility, ) def", "return '\"' + s + '\"' # Adds squotes around", "should be compiled - defaults to all if left unstated.", "2, deps = [regular_proto_name], visibility = visibility, testonly = testonly,", "if default == None and (host == None or ppc", "Standard blaze testonly parameter. textual_hdrs: Analogous to cc_library. visibility: Standard", "Which architectures to build this library for, None => EMBEDDED_ARCHES", "= None, arches = None, copts = None, defines =", "currently selected platform architecture. Args: name: Base name for this", "Analogous to cc_library. visibility: Standard blaze visibility parameter. xdeps: External", "arch, visibility = visibility, testonly = testonly, proto_include = proto_include,", "+ proto_rollup_flags + [ \"-I%s\" % protobuf_include, \"--grpc-cpp_out=%s\" % cpp_out,", "+ \"/\" + hdr_stem + \".%s\" + hdr_ext) lines =", "a blaze select(...) statement that can be used in most", "[\"g3=$${PWD}\"] # TODO(unknown): Switch protobuf to using the proto_include mechanism", "of the generated alias. Returns: Name of shim rule for", "result.pop() level += -1 continue else: level += 1 result.append(d)", "arches, visibility): \"\"\"Macro to build .pb.h multi-arch master switch for", "binary and arches. 
Args: name: Analogous to cc_binary name argument.", "= visibility, ) def _gen_proto_shims(name, pb_modifier, srcs, arches, visibility): \"\"\"Macro", "protobuf_include, \"--cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp %s.pb.h", "decorate(decorate(dep, arch), \"proto_rollup.flags\") for dep in deps if dep.endswith(\"_proto\") ]", "[], arches), copts = copts, defines = defs_plus, includes =", "Stratum platform arch for .pb.h shims and other portability hacks.", "\"--cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp %s.pb.h %s\"", "build into this library, but also exported for dependent rules", "= value if \"x86\" in arches else default, ) def", "cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp %s.pb.h %s\" % (temp_stem,", "entries = [ native.FilesetEntry( srcdir = decorate(name, \"bin\"), destdir =", "src) if proto_src_loc.startswith(full_proto_include + \"/\"): proto_src_loc = proto_src_loc[len(full_proto_include) + 1:]", "name, pb_modifier = \".pb\", srcs = srcs + hdrs, arches", "set into sc_cc_lib to wrap them them up into a", "\"/\" level = 0 result = [] for d in", "visibility = visibility, ) # Add any platform specific files", "shim_rule = decorate(name, \"shims\") native.genrule( name = shim_rule, srcs =", "full_proto_include = None if proto_include == \".\": full_proto_include = native.package_name()", "deps.to_list() ], visibility = visibility, ) # Add any platform", "outs = outs, cmd = \"; \".join(cmds), visibility = visibility,", "and all dependency packages. * ${name}_${arch} fileset containing the corresponding", "defines = None, copts = None, linkopts = None, visibility", "deps: List of other sc_packages to add to this package.", "selected. During embedded builds this target will generate a dummy", "instead of this. \"-D__GOOGLE_STL_LEGACY_COMPATIBILITY\", ] # Used for C and", "# This is more general than it may seem: genrule", "of the alias for x86 builds. 
default: The result of", "proto_include = None, python_support = False, services = []): \"\"\"Public", "{\"grpc\", \"rpc\"}; Only \"grpc\" is supported. So \"rpc\" and \"grpc\"", "label.startswith(\"//\"): # //foo/bar -> //foo/bar:bar_suffix return \"%s:%s_%s\" % (label, label.split(\"/\")[-1],", "generated alias. Returns: Name of shim rule for use in", "hdr_stem = filename[0:-6] new_hdr_name = hdr_stem + hdr_ext outs.append(new_hdr_name) #", "), ] + [ native.FilesetEntry(srcdir = decorate(dep, extension)) for dep", "# Use this for program-sizing build ] # PPC ======================================================================", "# might be generated on another forge server. proto_path_cmds =", "includes = None, include_prefix = None, strip_include_prefix = None, data", "of PowerPC arch - \"ppc\". EMBEDDED_X86 Name of \"x86\" arch.", "be packaged. data: List of files (and file producing rules)", "input paths will stay invalid. \"\"\" sep = \"/\" level", "name: Base name for this library. srcs: List of proto", "package binaries and data for deployment. For package \"name\", generates:", "0 result = [] for d in path.split(sep): if d", "data = None, defines = None, copts = None, linkopts", "= [\"STRATUM_ARCH_HOST\"], ppc = [\"STRATUM_ARCH_PPC\"], x86 = [\"STRATUM_ARCH_X86\"], ) STRATUM_INTERNAL", "shim_rule def _gen_py_proto_lib(name, srcs, deps, visibility, testonly): \"\"\"Creates a py_proto_library", "default, ) def sc_platform_alias( name, host = None, ppc =", ") grpc_shim_rule = None if (service_enable[\"grpc\"]): grpc_shim_rule = _gen_proto_shims( name", "native.FilesetEntry(srcdir = decorate(dep, extension)) for dep in deps.to_list() ], visibility", "[] grpc_gen_srcs = [] tools = [protoc_label] grpc_tools = [protoc_label,", "are disjoint. sc_cc_lib( name = decorate(name, arch), deps = dep_set,", "Path to add to include path. 
This will affect the", "data = sc_platform_filter(data, [], arches), visibility = visibility, ) register_extension_info(", "\"%s/%s\" % (native.package_name(), src) proto_path_cmds.append(\"[[ -e %s ]] || cd", "= None if proto_include == \".\": full_proto_include = native.package_name() elif", "def _gen_py_proto_lib(name, srcs, deps, visibility, testonly): \"\"\"Creates a py_proto_library from", "sc_platform_filter(hdrs, [], arches), alwayslink = alwayslink, copts = sc_platform_filter(copts, [],", "defines = defs_plus, includes = sc_platform_filter(includes, [], arches), include_prefix =", "keep this simple. For library \"name\", generates: * ${name}_default_pb, a", "%s.pb.cc %s\" % (temp_stem, _loc(gen_pb_cc)), ] pb_outs = [gen_pb_h, gen_pb_cc]", "0, visibility = visibility, ) dep_set = depset(deps) | [protobuf_label]", "Paths to add as \"-I\" compilation options. testonly: Standard blaze", "proto_include on an sc_proto_lib with python support.\") _gen_py_proto_lib( name =", "that ${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto. Args: name: Base name for", "+ [grpc_shim_rule], arches = [arch], copts = [], includes =", "version of this library. Only generated if python_support == True.", "library, but also exported for dependent rules to utilize. deps:", "accum_flags = [] full_proto_include = None if proto_include == \".\":", "add to this package. arches: Which architectures to build this", "= None, visibility = None): \"\"\"Creates rules for the given", "cpp_out = \"$${g3}/$(GENDIR)/%s/%s\" % (native.package_name(), arch) accum_flags = [] full_proto_include", "gen_hdrs sc_cc_lib( name = decorate(grpc_name, arch), deps = grpc_dep_set, srcs", "dummy binary and will not attempt to build any dependencies.", "_gen_proto_shims, above. 
* ${name}_${arch}_pb protobuf compile rules - one for", ") native.Fileset( name = fileset_name, out = name, entries =", "== \"rpc\": service_enable[\"grpc\"] = 1 else: fail(\"service='%s' not in (grpc,", "\"\"\"Public macro to package binaries and data for deployment. For", "by removing unnecessary path-up segments and its corresponding directories. Providing", "to bin/ and share/ respectively. * ${name}_${arch}_tarball rule builds that", "dep in deps if dep.endswith(\"_proto\") ] proto_rollup_cmds = [\"printf '%%s\\n'", "&& \".join(gen_cmds), new_hdr_loc)) shim_rule = decorate(name, \"shims\") native.genrule( name =", "through to all filesets. \"\"\" bins = depset(bins or [])", "[], default = []), deps = sc_platform_select( host = deps", "symbols needed. The public symbols are the macros: decorate(path) sc_cc_lib", "For declaring Stratum internal visibility. The macros are like cc_library(),", "at runtime (host builds only). testonly: Standard blaze testonly parameter.", "(temp_stem, _loc(gen_pb_cc)), ] pb_outs = [gen_pb_h, gen_pb_cc] native.genrule( name =", "[], deps = [], arches = [], visibility = None,", "each arch. * sc_cc_lib(name) with those as input. * ${name}_py", "$${TEMP_DIR}/tarball/bin/*.stripped\", \" do mv $${f} $${f%.stripped}\", # rename not available.", "if \"x86\" in arches else None, visibility = visibility, )", "= visibility, ) my_proto_rollup = decorate(name_arch, \"proto_rollup.flags\") protoc_srcs_set = (srcs", "# Note: Public sc_proto_lib invokes this once per (listed) arch;", "_gen_proto_lib( name = name, srcs = srcs, hdrs = [shim_rule]", "= deps, visibility = visibility, testonly = testonly, ) register_extension_info(", "package. arches: Which architectures to build this library for, None", "${name}_${arch}_data filesets containing respectively all of the binaries and all", "deps for this library arches: Which architectures to build this", "of proto files. 
arches: List of arches this shim should", "hdrs = hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule], arches = [arch],", "= testonly, ) for src in srcs + hdrs: if", "in a BUILD file, specifying the symbols needed. The public", "xdeps = depset(xdeps or []) copts = depset(copts or [])", "% (src_stem, arch) # We can't use $${PWD} until this", "= None, bins = None, data = None, deps =", "the include paths used for both sc_cc_lib and sc_proto_lib rules", "and its corresponding directories. Providing own implementation because import os", "protobuf_srcs = [protobuf_hdrs] protobuf_include = \"$${g3}/protobuf/src\" if arch in EMBEDDED_ARCHES:", "None, x86 = None, default = None): \"\"\"Public macro to", "arches else default, x86 = value if \"x86\" in arches", "rule that interacts safely with Stratum builds. Generates a cc_test", "select path.${arch}.pb.h Also generates an alias that will select the", "%s.pb.h %s\" % (temp_stem, _loc(gen_pb_h)), \"cp %s.pb.cc %s\" % (temp_stem,", "def sc_platform_alias( name, host = None, ppc = None, x86", "host = deps or [], default = [\"//stratum/portage:dummy_with_main\"], ), data", "= outs, cmd = \" && \".join(cmds) or \"true\", )", "to alter a blaze rule based on the target platform", "deps, arch, visibility, testonly, proto_include, grpc_shim_rule): \"\"\"Creates rules and filegroups", "List of proto files. arches: List of arches this shim", "filesets. \"\"\" bins = depset(bins or []) data = depset(data", "testonly parameter. textual_hdrs: Analogous to cc_library. visibility: Standard blaze visibility", "use for ppc builds. 
x86: The value to use for", "match src %s\" % (full_proto_include, proto_src_loc)) # By cd-ing to", "macro to create an alias that changes based on target", "consisting of: #ifdef logic to select path.${arch}.pb.h Also generates an", "srcs: List of proto files hdrs: More files to build", "defines = None, includes = None, include_prefix = None, strip_include_prefix", "arches: if testonly: arches = HOST_ARCHES else: arches = ALL_ARCHES", "argument. arches: List of architectures to generate this way. copts:", "[], defines = defines, copts = copts, linkopts = linkopts,", "value if \"host\" in arches else default, ppc = value", "= \"host\" HOST_ARCHES = [HOST_ARCH] ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES", "sc_cc_libs. grpc_shim_rule: If needed, the name of the grpc shim", "name = decorate(name_arch, \"headers\"), srcs = hdrs + protoc_deps, visibility", "= [], includes = includes, testonly = testonly, textual_hdrs =", "normal proto_library rules. def sc_proto_lib( name = None, srcs =", "To use this, load() this file in a BUILD file,", "\".\": full_proto_include = native.package_name() elif proto_include: full_proto_include = \"%s/%s\" %", "//foo/bar -> //foo/bar:bar_suffix return \"%s:%s_%s\" % (label, label.split(\"/\")[-1], suffix) else:", "\"$${g3}/protobuf/src\" if arch in EMBEDDED_ARCHES: grpc_plugin = _SC_GRPC_PLUGIN else: grpc_plugin", "# Emulate Python 2.5+ str(startswith([prefix ...]) def starts_with(s, prefix_list): for", "that isn't specified. visibility: The visibility of the alias target.", "those as input. * ${name}_py a py_proto_library version of this", "] + [ native.FilesetEntry(srcdir = decorate(dep, extension)) for dep in", "native.filegroup( name = decorate(name_arch, \"headers\"), srcs = hdrs + protoc_deps,", "argument. linkopts: Analogous to cc_test linkopts argument. visibility: Analogous to", "for this package. 
bins: List of sc_cc_bin rules to be", "def _make_filename(label): if label.startswith(\"//\"): # //foo/bar:bat/baz -> google3_foo/bar/bat/baz return label.replace(\"//\",", "gen_stem = \"%s.%s\" % (src_stem, arch) # We can't use", "deps, visibility, testonly): \"\"\"Creates a py_proto_library from the given srcs.", "that will select the appropriate target. If no selection is", "\"ppc\" EMBEDDED_X86 = \"x86\" EMBEDDED_ARCHES = [ EMBEDDED_PPC, EMBEDDED_X86, ]", "x86 = x86, ), visibility = visibility, ) # Embedded", "pb_outs = [gen_pb_h, gen_pb_cc] native.genrule( name = src_arch + \".pb\",", "= None, linkopts = None, visibility = None): \"\"\"Creates a", "- private to this library. hdrs: As above, but also", "value to use for any of {host,ppc,x86} that isn't specified.", "\"\"\" sep = \"/\" level = 0 result = []", "in path.split(sep): if d in (\"\", \".\"): if result: continue", "binaries and all of the data needed for this package", "gen_stem + \".grpc.pb.h\" gen_grpc_pb_cc = gen_stem + \".grpc.pb.cc\" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc)", "% protobuf_include, \"--cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd $${g3}\", \"cp", "\"then for f in $${TEMP_DIR}/tarball/bin/*.stripped\", \" do mv $${f} $${f%.stripped}\",", "files to build into this library, but also exported for", "wrap them them up into a usable library; note that", "= None, copts = None, linkopts = None, visibility =", "builds. x86: The value to use for x86 builds. default:", "\"grpc\": service_enable[\"grpc\"] = 1 elif service == \"rpc\": service_enable[\"grpc\"] =", "not available. \"done\", \"fi\", \"tar czf %s -h -C $${TEMP_DIR}/tarball", "Careful, our proto might be in GENDIR! proto_path_cmds.append(\"; \".join([ \"if", "= tools, cmd = \" && \".join(cmds), heuristic_label_expansion = 0,", "this package. arches: Which architectures to build this library for,", "Standard blaze visibility argument. testonly: Standard blaze testonly argument. 
\"\"\"", "_EMBEDDED_CXXFLAGS = [ \"-std=gnu++11\", # Allow C++11 features _and_ GNU", "[], arches), defines = defs_plus, includes = sc_platform_filter(includes, [], arches),", "sc_proto_lib rules that depend on this rule. Typically \".\" python_support:", "= []), deps = sc_platform_select( host = deps or [],", "sc_platform_filter(data, [], arches), visibility = visibility, ) register_extension_info( extension_name =", "rules) to be packaged. deps: List of other sc_packages to", "target. \"\"\" native.alias( name = name, actual = sc_platform_select( default", "must be provided for every target platform. Args: host: The", "cmd = \" && \".join(cmds) or \"true\", ) sc_platform_alias( name", "file. # Lines expand inside squotes, so quote accordingly. include_fmt", "${name}_default_pb. Args: name: Standard blaze name argument. srcs: Standard blaze", "to include path. This will affect the symbols generated by", "C++ compiler invocations. _EMBEDDED_CXXFLAGS = [ \"-std=gnu++11\", # Allow C++11", "If needed, the name of the grpc shim for this", "hdrs or deps # attributes, so all embedded dependencies appear", "outs = pb_outs, tools = tools, cmd = \" &&", "grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule( name = src_arch + \".grpc.pb\",", "testonly parameter. visibility: Standard blaze visibility parameter. \"\"\" deps =", "'%%s\\n' %s\" % flag for flag in accum_flags] proto_rollup_cmds.append(\"cat $(SRCS)\")", "attributes, so all embedded dependencies appear as a `src'. 
#", "= dep_set | [name] | _SC_GRPC_DEPS grpc_gen_hdrs_plus = grpc_gen_hdrs +", "select the appropriate proto target based on the currently selected", "= \".grpc.pb\", srcs = srcs + hdrs, arches = arches,", "srcs = srcs, deps = [decorate(dep, \"default_pb\") for dep in", "visibility, testonly, proto_include, grpc_shim_rule): \"\"\"Creates rules and filegroups for embedded", "hdrs + gen_hdrs, arches = [arch], copts = [], includes", "label[:-1] if \":\" in label: # .../bar:bat -> .../bar:bat_suffix return", "portable .proto Library. sc_cc_bin Declare a portable Binary. sc_package Declare", "a blaze alias that will select the appropriate target. If", "or \"//stratum/portage:dummy\", host = host, ppc = ppc, x86 =", "given srcs. There's no clean way to make python protos", "the alias for x86 builds. default: The result of the", "py_proto_library based on ${name}_default_pb. Args: name: Standard blaze name argument.", "rules to utilize. deps: List of deps for this library", "+ hdr_ext outs.append(new_hdr_name) # Generate lines for shim switch file.", "= [] gen_hdrs = [] grpc_gen_hdrs = [] grpc_gen_srcs =", "-u -o $(@)\" % \"; \".join(proto_rollup_cmds) native.genrule( name = decorate(name_arch,", "\"sc_proto_lib\", label_regex_for_dep = \"{extension_name}\", ) def sc_package( name = None,", ".pb.h multi-arch master switch for sc_proto_lib. For each src path.proto,", "for the given portable library and arches. Args: name: Analogous", "this library. hdrs: As above, but also exported for dependent", "bit of extra work with these include flags to avoid", "hdrs = hdrs + gen_hdrs, arches = [arch], copts =", "deps = None, srcs = None, hdrs = None, arches", "default = [\"STRATUM_ARCH_HOST\"], ppc = [\"STRATUM_ARCH_PPC\"], x86 = [\"STRATUM_ARCH_X86\"], )", "name, deps = None, srcs = None, arches = None,", "for constructing commands. 
Args: target: Blaze target name available to", "None, defines = None, includes = None, testonly = None,", ") return shim_rule def _gen_py_proto_lib(name, srcs, deps, visibility, testonly): \"\"\"Creates", "src_arch + \".pb\", srcs = protoc_srcs_set, outs = pb_outs, tools", "= \"%s/%s\" % (cpp_out, native.package_name()[len(full_proto_include):]) # We do a bit", "None, hdrs = None, arches = None, copts = None,", "arches = [arch], copts = [], includes = includes, testonly", "label.replace(\"//\", \"google3/\").replace(\":\", \"/\") elif label.startswith(\":\"): # :bat/baz -> bat/baz return", "%s; } > %s\" % (\" && \".join(gen_cmds), new_hdr_loc)) shim_rule", "[]), deps = sc_platform_select( host = deps or [], default", "to # compile this proto. native.filegroup( name = decorate(name_arch, \"headers\"),", "% (native.package_name(), src) proto_path_cmds.append(\"[[ -e %s ]] || cd $(GENDIR)\"", "None, textual_hdrs = None, visibility = None, xdeps = None):", "temp_prefix = \"%s/%s\" % (cpp_out, native.package_name()[len(full_proto_include):]) # We do a", "List of deps for this library arches: Which architectures to", "run protoc w/ erpc plugin: ${src}.proto => ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects", "lines = [ \"#if defined(STRATUM_ARCH_%s)\" % \"PPC\", include_fmt % \"ppc\",", "= decorate(name, \"host\") if \"host\" in arches else None, ppc", "= decorate(name, extension), entries = [ native.FilesetEntry( files = inputs,", "[ \"-I%s\" % protobuf_include, \"--cpp_out=%s\" % cpp_out, proto_src_loc, ]), \"cd", "doesn't match src %s\" % (full_proto_include, proto_src_loc)) # By cd-ing", "in (grpc, rpc)\" % service) deps = depset(deps or [])", "binaries and data for deployment. For package \"name\", generates: *", "= None, includes = None, include_prefix = None, strip_include_prefix =", "rules. testonly: Standard blaze testonly parameter. proto_include: Include path for", "== True. Args: name: Base name for this library. 
srcs:", "\"ppc\") if \"ppc\" in arches else None, x86 = decorate(name,", "Creates a relative filename from a label, replacing \"//\" and", "= \"dereference\", )], default = [], ) native.Fileset( name =", "includes = includes, testonly = testonly, textual_hdrs = gen_hdrs, visibility", "- one for each arch. * sc_cc_lib(name) with those as", "argument. data: Files to provide as data at runtime (host", "final tarball. platform_entries = sc_platform_select( # We use a different", "None, data = None, defines = None, copts = None,", "if proto_src_loc.startswith(full_proto_include + \"/\"): proto_src_loc = proto_src_loc[len(full_proto_include) + 1:] else:", "- \"ppc\". EMBEDDED_X86 Name of \"x86\" arch. HOST_ARCH Name of", "blaze testonly parameter. textual_hdrs: Analogous to cc_library. visibility: Standard blaze", "no decorations assumed, used and exported as header, not for", "* ${name}_${arch}_pb protobuf compile rules - one for each arch.", "inside squotes, so quote accordingly. include_fmt = \"#include \" +", "'\"' + s + '\"' # Adds squotes around a", "lists. \"\"\" outs = [] cmds = [] hdr_ext =", "to produce # different symbols. Careful, our proto might be", "library for, None => ALL. visibility: Standard blaze visibility parameter,", "outs = outs, cmd = \" && \".join(cmds) or \"true\",", "a regular proto library. * ${name}_py, a py_proto_library based on", "generate a python proto library from this rule. Any sc_proto_lib", "Name of shim rule for use in follow-on hdrs and/or", "hdr_ext) lines = [ \"#if defined(STRATUM_ARCH_%s)\" % \"PPC\", include_fmt %", "program-sizing build #-- \"-Os\", # Use this for program-sizing build", "\"ppc\" in arches else None, x86 = decorate(name, \"x86\") if", "if left unstated. 
Internally, libraries and binaries are generated for", "package \"name\", generates: * ${name}_${arch}_bin and ${name}_${arch}_data filesets containing respectively", "\"proto_rollup.flags\") protoc_srcs_set = (srcs + hdrs + protoc_deps + protobuf_srcs", "to build this library for. visibility: Standard blaze visibility parameter,", "None def sc_platform_select(host = None, ppc = None, x86 =", "%s ]]\" % (\"%s/%s\" % (full_proto_include, proto_src_loc)), \"then cd %s\"", "= None, hdrs = None, arches = None, copts =", "to subsequent rules. testonly: Standard blaze testonly parameter. proto_include: Path", "# :bat/baz -> bat/baz return label[1:] else: # bat/baz ->", "= sc_platform_select( # We use a different ppc toolchain for", "solution. Currently supported architectures: ppc x86 \"\"\" load(\"//tools/build_defs/label:def.bzl\", \"parse_label\") load(", "If no selection is provided for a given platform, {default}", "gen_srcs.append(gen_pb_cc) cmds = bash_vars + [ \"mkdir -p %s\" %", "will not attempt to build any dependencies. Args: name: Analogous", "copts = copts, defines = defs_plus, includes = includes, linkopts", "d == \"..\": if level > 0: result.pop() level +=", "and force fixing warnings. \"-no-canonical-prefixes\", # Don't mangle paths and", "config_label_prefix = \"//stratum:stratum_\" return select({ \"//conditions:default\": (host or default), config_label_prefix", "not for flags, libs, etc. \"\"\" alwayslink = 0 deps", "% b for b in bins.to_list()]), (\"data\", data), ]: native.Fileset(", "extension)) for dep in deps.to_list() ], visibility = visibility, )", "[], arches), srcs = sc_platform_filter(srcs, [], arches), hdrs = sc_platform_filter(hdrs,", "a dummy binary and will not attempt to build any", "and will not attempt to build any dependencies. 
Args: name:", "hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule], arches = [arch], copts =", "-> //foo/bar:bar_suffix return \"%s:%s_%s\" % (label, label.split(\"/\")[-1], suffix) else: #", "of architectures for which they should be compiled - defaults", "s in srcs.to_list() if not s.endswith(\".h\")]: alwayslink = 1 if", "_gen_py_proto_lib(name, srcs, deps, visibility, testonly): \"\"\"Creates a py_proto_library from the", "The result of the alias for any of {host,ppc,x86} that", "%s; } | sort -u -o $(@)\" % \"; \".join(proto_rollup_cmds)", "srcs = srcs, hdrs = [shim_rule] + hdrs, deps =", "% (full_proto_include, proto_src_loc)) # By cd-ing to another directory, we", "= depset(hdrs or []) xdeps = depset(xdeps or []) copts", "arches), copts = copts, defines = defs_plus, includes = includes,", "package. bins: List of sc_cc_bin rules to be packaged. data:", "this shim should support. visibility: The blaze visibility of the", "in deps if dep.endswith(\"_proto\") ] proto_rollup_cmds = [\"printf '%%s\\n' %s\"", "compilation options. includes: Paths to add as \"-I\" compilation options.", "cc_test linkopts argument. visibility: Analogous to cc_test visibility argument. \"\"\"", "name = decorate(name, \"tarball\"), srcs = [\":%s\" % fileset_name], outs", "in arches: _gen_proto_lib( name = name, srcs = srcs, hdrs", "= sc_platform_select( default = default or \"//stratum/portage:dummy\", host = host,", "= grpc_pb_outs, tools = grpc_tools, cmd = \" && \".join(cmds),", "+ [ native.FilesetEntry(srcdir = decorate(dep, extension)) for dep in deps.to_list()", "visibility = visibility, testonly = testonly, ) # TODO(unknown): Add", "path equivalent to the input path with minimal use of", "and C++ compiler invocations. _EMBEDDED_CFLAGS = [ \"-I$(GENDIR)\", ] #", "name argument. deps: Analogous to cc_binary deps argument. srcs: Analogous", "deps argument. srcs: Analogous to cc_library srcs argument. 
hdrs: Analogous", "key difference: you can supply lists of architectures for which", "fi)\" % (full_proto_include, full_proto_include), ) else: temp_prefix = \"%s/%s\" %" ]
[ "is IOSv\", \"uptime\": \"1 day, 16 hours, 42 minutes\" }", "= disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable = BOOTLDR variable = Configuration register", "version_obj = ShowVersion(device=self.dev1) with self.assertRaises(KeyError): parsed_output = version_obj.parse() def test_golden_iosv(self):", "Available system uptime = 0 minutes Switchovers system experienced =", "self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def test_golden_ios_2(self): self.maxDiff = None self.dev_iosv = Mock(**self.device_output)", "CEF720 24 port 1000mb SFP Rev. 1.0\", \"pid\": \"WS-X6824-SFP\", \"vid\":", "Systems, Inc. Compiled Fri 05-Aug-11 00:32 by prod_rel_team ROM: System", "GMT Tue Sep 10 2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on', 'license_level':", "'21:57:23 UTC Sat Aug 28 2010', 'returned_to_rom_by': 'power cycle', \"rtr_type\":", "F0 Version ID : V07 CLEI Code Number : CMMPP00DRB", "'pid': 'GLC-SX-MMD', 'sn': 'ACW102938VS', 'vid': 'V01 ', }, }, },", "{ 'NM-1T3/E3=': { 'descr': 'Clear/Subrate T3/E3 WAN', 'name': 'Clear/Subrate T3/E3", "PID Runtime(ms) Invoked uSecs 5Sec 1Min 5Min TTY Process 368", "= None self.dev_iosv = Mock(**self.golden_output_iosv) platform_obj = ShowBootvar(device=self.dev_iosv) parsed_output =", "'NM-1T3/E3=', 'sn': 'FOC28476ADM', 'vid': 'V01 ', }, }, }, '16':", "'C3900-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'Reload Command', 'last_reload_type': 'Normal Reload',", "\"chassis_sn\": \"9K66Z7TOKAACDEQA24N7S\", \"chassis\": \"IOSv\", \"image_id\": \"VIOS-ADVENTERPRISEK9-M\", 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed", "as test_show_platform_iosxe,\\ TestShowPlatformPower as test_show_platform_power_iosxe,\\ TestShowVersionRp as test_show_version_rp_iosxe,\\ TestShowProcessesCpu as", "}, }, }, } golden_output_8 = {'execute.return_value': ''' NAME: \"3825", "\"pid\": \"PWR-1400-AC\", \"vid\": \"V01\", 
\"sn\": \"ABC0830J127\", } } }, },", "DESCR: \"WS-C1010XR-48FPS-I\" PID: WS-C1010XR-48FPS-I, VID: V05 , SN: FD2043B0K3 NAME:", "= None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output =", "{ 'WS-C3210X-48T-S': { 'descr': 'WS-C3210X-48', 'name': '2', 'pid': 'WS-C3210X-48T-S', 'sn':", "Next reload license Level: ipservices cisco WS-C3750X-24P (PowerPC405) processor (revision", "VID: , SN: FXS170802GL NAME: \"CLK-7600 2\", DESCR: \"OSR-7600 Clock", "WS-C6503-E (R7000) processor (revision 1.4) with 983008K/65536K bytes of memory.", "platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_asr1k) class test_show_platform_power(test_show_platform_power_iosxe): def test_empty(self): self.device = Mock(**self.empty_output)", "self.device = Mock(**self.golden_output_6) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,", "'descr': '16 Port 10BaseT/100BaseTX EtherSwitch', 'name': '16 Port 10BaseT/100BaseTX EtherSwitch", "= obj.parse(slot='0') self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_serdes_internal) def test_empty(self):", "\"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 2.6\" PID: WS-X6748-GE-TX", "DSP SIMM with four DSPs\" PID: PVDM2-64 , VID: V01", "boot loader', 'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE", "= Mock(**self.golden_output_c3850) platform_obj = ShowModule(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class", "s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) Technical Support:", "Module on Slot 0\", DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-2", "Rev. 3.1\" PID: WS-SUP720 , VID: , SN: SAL11434N9G NAME:", "Forwarding Card 4 Rev. 
1.2\" PID: WS-F6K-DFC4-E , VID: V02,", "interface daughtercard', 'name': 'Six port FXO voice interface daughtercard on", "8 weeks, 3 days, 10 hours, 27 minutes System returned", "TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\\ TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\ TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\ TestShowPlatformHardwareSerdesStatistics", "Cisco Systems, Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team ROM:", "Number : F0 Version ID : V07 CLEI Code Number", "'pid': 'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R', 'vid': 'V01L ', }, }, '2/1/1':", "\"FXS1712Q1R8\", } } }, \"slot\": { \"CLK-7600 1\": { \"other\":", "----- ----- ---------- ---------- * 1 30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M", "Ethernet interfaces 1 Virtual Private Network (VPN) Module DRAM configuration", "BRI U (2091, 3086) on Slot 0 SubSlot 1\", DESCR:", "\"X2-10GB-SR\", \"sn\": \"ONT170202T1\", \"vid\": \"V06 \", } }, \"5\": {", "= ShowPlatformHardwareQfpStatisticsDrop( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(status='active') def test_golden_active(self):", "Ethernet': '28', 'FastEthernet': '1' }, 'os': 'IOS', 'platform': 'C3750E', 'processor_type':", "test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\ TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\\ TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\", "EtherSwitch NM on Slot 2 SubSlot 0\", DESCR: \"Gigabit(1000BaseT) module", "{ \"active\": { \"boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\", \"configuration_register\": \"0x2012\" }, 
\"next_reload_boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\"", "'CISCO3945-CHASSIS', 'sn': 'FGL161010K8', 'vid': 'V05 ', }, }, }, 'slot':", "{ 'chassis': { 'CISCO3945-CHASSIS': { 'descr': 'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS', 'pid':", "serial number : FDO2028F1WK Top Assembly Part Number : 800-38990-01", "'sn': 'FOC65428K9F', 'vid': 'V01 ', }, }, '1': { 'WIC-1B-U-V2':", "Forwarding Card 3 Rev. 1.1\", \"name\": \"switching engine sub-module of", "for EtherSwitch NM on Slot 2 SubSlot 0\", DESCR: \"Gigabit(1000BaseT)", "bytes of memory. Processor board ID FXS1821Q2H9 SR71000 CPU at", "Mock(**self.semi_empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError): parsed_output = dir_obj.parse() def", "14 Virtual Ethernet interfaces 1 FastEthernet interface 28 Gigabit Ethernet", "} }, \"6\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR", "WS-F6K-DFC4-E , VID: V02, SN: SAL171846RF NAME: \"Transceiver Te2/1\", DESCR:", "NAME: \"c3845 Motherboard with Gigabit Ethernet on Slot 0\", DESCR:", "ShowModule,\\ ShowSwitch, ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\\ TestShowPlatformPower", "\"V02\", \"sn\": \"DCH183500KW\", } } }, \"PS 1 PWR-1400-AC\": {", ", SN: FOC729346GQ NAME: \"Virtual Private Network (VPN) Module on", "enabled. 512K bytes of flash-simulated non-volatile configuration memory. 
Base ethernet", "reload reason: Reload command This product contains cryptographic features and", "\"264\", \"size\": \"0\", \"permissions\": \"drw-\" }, \"nvram\": { \"last_modified_date\": \"Oct", "}, 2: { \"invoked\": 1466728, \"usecs\": 2442, \"tty\": 0, \"one_min_cpu\":", "147988420 Mar 29 2017 00:00:00 +00:00 vios-adventerprisek9-m 268 -rw- 524288", "}, } } }, \"2\": { \"lc\": { \"WS-X6816-10GE\": {", "self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def test_golden(self): self.device =", "Transceiver 10Gbase-SR Te2/6\", \"name\": \"Transceiver Te2/6\", \"pid\": \"X2-10GB-SR\", \"sn\": \"FNS153920YJ\",", "{ 'EM-HDA-6FXO': { 'descr': 'Six port FXO voice interface daughtercard',", "None data datak9 Permanent datak9 Configuration register is 0x2102 '''}", "{ 'descr': 'LLL Power Supply', 'name': 'Switch 1 - Power", "\"V02\", \"sn\": \"SAL1128UPQ9\", \"subslot\": { \"0\": { \"WS-F6700-DFC3CXL\": { \"descr\":", "sub-module of 3\", \"pid\": \"WS-F6K-DFC4-A\", \"sn\": \"SAL171848KL\", \"vid\": \"V04\", }", "of 4\", \"pid\": \"WS-F6700-CFC\", \"sn\": \"SAL13516QS8\", \"vid\": \"V06\", } }", "T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1", "test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device) parsed_output", "PID: WS-C3210X-48T-S , VID: V02 , SN: FD5678Z90P NAME: \"Switch", "minutes System returned to ROM by power-on System restarted at", "Redundant System Information : ------------------------------ Available system uptime = 0", "def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowEnvironment(device=self.dev) with self.assertRaises(SchemaEmptyParserError):", "of non-volatile configuration memory. 
2000880K bytes of ATA System CompactFlash", "use encryption. Importers, exporters, distributors and users are responsible for", "VID: , SN: FXS181101V4 NAME: \"1\", DESCR: \"WS-SUP720-3BXL 2 ports", "laws. By using this product you agree to comply with", "'descr': 'High Density Voice Module - 8FXS/DID', 'name': 'High Density", "status='running') def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowVersionRp(device=self.device1) with", "V01D , SN: DTN1504L0E9 NAME: \"TenGigabitEthernet1/1/1\", DESCR: \"SFP-10GBase-SR\" PID: SFP-10G-SR", "regulations. If you are unable to comply with U.S. and", "4 EARL sub-module of 1\", DESCR: \"VS-F6K-PFC4 Policy Feature Card", "configuration memory. 2097152K bytes of ATA System CompactFlash 0 (Read/Write)", "UTC Sat Aug 28 2010', 'returned_to_rom_by': 'power cycle', \"rtr_type\": \"WS-C6503-E\",", "Ethernet\" PID: CISCO3845-MB , VID: V09 , SN: FOC729346GQ NAME:", "= None self.dev_iosv = Mock(**self.golden_output_ios) version_obj = ShowVersion(device=self.dev_iosv) parsed_output =", "\"IOSv\", \"image_id\": \"VIOS-ADVENTERPRISEK9-M\", 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 29-Mar-17 14:05', \"processor_type\":", "Normal Reload Last reload reason: Reload Command This product contains", "0\", DESCR: \"Cisco Services Performance Engine 150 for Cisco 3900", "Ethernet': '28', 'Ten Gigabit Ethernet': '2' }, 'mem_size': { 'flash-simulated", "minutes', 'version': '15.0(1)M7', 'version_short': '15.0', }, } def test_empty(self): self.dev1", "\"3\", DESCR: \"WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0\"", "obj = ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(", "{ \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/16\", \"name\": \"Transceiver", "bits wide with parity enabled. 
255K bytes of non-volatile configuration", "150 for Cisco 3900 ISR on Slot 0\", DESCR: \"Cisco", "test_empty(self): self.dev1 = Mock(**self.empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(AttributeError): parsered_output", "ONT170202T1 NAME: \"Transceiver Te1/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/5\" PID:", "}, }, }, 'slot': { '0': { 'rp': { 'C3900-SPE150/K9':", "test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) with self.assertRaises(SchemaEmptyParserError):", "System\", \"pid\": \"WS-C6504-E\", \"vid\": \"V01\", \"sn\": \"FXS1712Q1R8\", } } },", "ports Supervisor Engine 720 Rev. 5.6\" PID: WS-SUP720-3BXL , VID:", "Power Supply 1\", DESCR: \"ABC Power Supply\" PID: C3KX-PWR-350WAC ,", "{ \"name\": \"PS 2 PWR-2700-AC/4\", \"descr\": \"2700W AC power supply", "\"index\": \"268\", \"size\": \"524288\", \"permissions\": \"-rw-\" }, \"boot\": { \"last_modified_date\":", "'version': '15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M', 'os': 'IOS', 'image_type': 'production image', 'compiled_date':", ": 800-32727-03 Daughterboard serial number : FDO172217ED System serial number", "NAME: \"Switch 1 - Power Supply 1\", DESCR: \"LLL Power", "SOFTWARE (fc1)', 'rtr_type': 'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57 EST Mon", "Top Assembly Part Number : 800-38990-01 Top Assembly Revision Number", "uSecs 5Sec 1Min 5Min TTY Process 368 362874 3321960 109", "}, \"2\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/2\",", "platform_obj = ShowSwitchDetail(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self):", "14 2013 00:00:00 +00:00 config 267 -rw- 147988420 Mar 29", "on Slot 0\", DESCR: \"Cisco Services Performance Engine 150 for", "Hardware Board Revision Number : 0x04 Switch Ports Model SW", 
"class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev)", "'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 26-Jun-13 09:56', 'hostname': 'R5', 'image_id': 'C3750E-UNIVERSALK9-M',", "Loader (C3750X-HBOOT-M) Version ' '15.2(3r)E, RELEASE SOFTWARE (fc1)', 'chassis': 'WS-C3750X-24P',", "Processor board ID FGL161010K8 2 FastEthernet interfaces 3 Gigabit Ethernet", "'V01D ', }, }, '1/1/1': { 'SFP-10G-SR': { 'descr': 'SFP-10GBase-SR',", "PID: X2-10GB-SR , VID: V06 , SN: FNS153920YJ NAME: \"Transceiver", "{ 'descr': 'C3900 AC Power Supply 1', 'name': 'C3900 AC", "\"invoked\": 1466728, \"usecs\": 2442, \"tty\": 0, \"one_min_cpu\": 0.87, \"process\": \"IOSv", "= ShowProcessesCpu(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_version_rp(test_show_version_rp_iosxe): def", "System returned to ROM by reload System image file is", "'None', }, }, 'main_mem': '2027520', 'mem_size': { 'non-volatile configuration': '255',", "Trunk - T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN:", "{ 'main': { 'chassis': { 'CISCO3825': { 'descr': '3825 chassis',", "Last switchover reason = unsupported Hardware Mode = Simplex Maintenance", "\"descr\": \"VS-F6K-MSFC5 CPU Daughterboard Rev. 2.0\", \"name\": \"msfc sub-module of", "cpu_platform_obj.parse() class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj =", "PVDM2-64 , VID: V01 , SN: FOC63358WSI NAME: \"High Density", "(c) 1986-2017 by Cisco Systems, Inc. Compiled Wed 29-Mar-17 14:05", "DESCR: \"1000BaseSX SFP\" PID: GLC-SX-MMD , VID: V01 , SN:", "DESCR: \"WS-SUP720 MSFC3 Daughterboard Rev. 
3.1\" PID: WS-SUP720 , VID:", "(tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) Technical", "port 10/100/1000mb Ethernet Rev. 3.4\" PID: WS-X6748-GE-TX , VID: V04,", "435457K/87040K bytes of memory. Processor board ID 9K66Z7TOKAACDEQA24N7S 6 Gigabit", "\"-rw-\" } }, \"bytes_total\": \"2142715904\", \"bytes_free\": \"1989595136\" }, \"dir\": \"flash0:/\"", "\"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V02\", \"sn\": \"SAL1128UPQ9\", \"subslot\": { \"0\": {", "= Mock(**self.golden_output_c3850) platform_obj = ShowSwitchDetail(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) if", "obj = ShowPlatformHardware(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe):", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_4) def test_golden_output_5(self): self.maxDiff = None self.device =", "\"name\": \"3\", \"descr\": \"WS-X6824-SFP CEF720 24 port 1000mb SFP Rev.", "Base ethernet MAC Address : AC:F2:C5:FF:55:E7 Motherboard assembly number :", "DESCR: \"X2 Transceiver 10Gbase-SR Te2/5\" PID: X2-10GB-SR , VID: V05", "'CISCO2821', 'sn': 'FTX1234AMWT', 'vid': 'V07 ', }, }, }, 'slot':", "with 524288K bytes of memory. Processor board ID FDO1633Q14S Last", "\"subslot\": { \"0\": { \"WS-F6700-DFC3CXL\": { \"descr\": \"WS-F6700-DFC3CXL Distributed Forwarding", "720 Rev. 
5.6\" PID: WS-SUP720-3BXL , VID: V05, SN: SAL11434P2C", "\"tty\": 0, \"one_min_cpu\": 0.87, \"process\": \"IOSv e1000\", \"five_min_cpu\": 2.77, \"runtime\":", "PID: SFP-10G-LR , VID: V02 , SN: ONT182746GZ NAME: \"1\",", "of ATA CompactFlash 2 (Read/Write) 10080K bytes of ATA CompactFlash", "test_show_platform_hardware_iosxe,\\ TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\\ TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\ TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\", "{ \"descr\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2\", \"name\":", "internal=True) self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_slot_internal) def test_empty(self): self.device1", "None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowPlatform(device=self.dev_c3850) parsed_output = platform_obj.parse()", "'rom': 'Bootstrap program is C3750E boot loader', 'bootldr': 'C3750E Boot", "EtherSwitch on Slot 2', 'pid': 'NM-16ESW', 'sn': 'FOC135464KO', 'subslot': {", "program is IOSv N95_1 uptime is 1 day, 16 hours,", "of memory. 
Processor board ID FXS1821Q2H9 SR71000 CPU at 600Mhz,", "4.1\", \"name\": \"WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4\",", "'prod_rel_team', 'compiled_date': 'Wed 29-Mar-17 14:05', \"processor_type\": \"revision 1.0\", \"platform\": \"IOSv\",", "'Normal Reload', 'license_udi': { 'device_num': { '*0': { 'pid': 'C3900-SPE150/K9',", "\"119\", \"permissions\": \"-rw-\" }, \"config\": { \"last_modified_date\": \"Oct 14 2013", "{ \"boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\", \"configuration_register\": \"0x2012\" }, \"next_reload_boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\" } golden_output_iosv", "def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitchDetail(device=self.dev1) with self.assertRaises(SchemaEmptyParserError):", "dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = {", "AC power supply for CISCO7604 2\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\",", "\"five_sec_cpu_interrupts\": 0 } golden_output_1 = {'execute.return_value': ''' CPU utilization for", "''' best-c3945-IOS3#show inventory NAME: \"CISCO3945-CHASSIS\", DESCR: \"CISCO3945-CHASSIS\" PID: CISCO3945-CHASSIS ,", "inventory_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) inventory_obj", "= Mock(**self.empty_output) platform_obj = ShowSwitch(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse()", "'name': '1', 'pid': 'WS-C0123X-45T-S', 'sn': 'FDO123R12W', 'subslot': { '1': {", "CPU CPU utilization for five seconds: 13%/0%; one minute: 23%;", ", VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2 = { \"main\":", "DESCR: \"Two-Port Fast Ethernet High Speed WAN Interface Card\" PID:", "1\", \"pid\": \"WS-C6503-E-FAN\", \"vid\": \"V02\", \"sn\": \"DCH183500KW\", } } },", "= Down Reason: Failure Current 
Processor Information : ------------------------------- Active", "PID: EM-HDA-6FXO , VID: V03 , SN: FOC85389QXB '''} golden_parsed_output_8", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_slot) def test_golden_subslot(self): self.device = Mock(**self.golden_output_subslot)", "SOFTWARE (fc1) R5 uptime is 9 weeks, 4 days, 2", "1\", \"pid\": \"WS-F6K-PFC3BXL\", \"sn\": \"SAL11434LYG\", \"vid\": \"V01\", }, } },", "Reload command This product contains cryptographic features and is subject", "self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "'license_type': 'Permanent', 'next_reload_license_level': 'ipbasek9', }, 'security': { 'license_level': 'securityk9', 'license_type':", "'''\\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE", "ID : V03 CLEI Code Number : CMMFF00ARC Hardware Board", "= { 'version': {'version_short': '15.2', 'platform': 'C3750E', 'version': '15.2(2)E8', 'image_id':", "Services Performance Engine 150 for Cisco 3900 ISR\" PID: C3900-SPE150/K9", "Port 10BaseT/100BaseTX EtherSwitch on Slot 2\", DESCR: \"16 Port 10BaseT/100BaseTX", "04:07', 'compiled_by': 'prod_rel_team', 'rom': 'Bootstrap program is C3750E boot loader',", "Slot 2 SubSlot 0\", DESCR: \"Gigabit(1000BaseT) module for EtherSwitch NM\"", "SN: FD2043B0K3 NAME: \"Switch 1 - Power Supply 1\", DESCR:", "\"descr\": \"Enhanced 3-slot Fan Tray 1\", \"pid\": \"WS-C6503-E-FAN\", \"vid\": \"V02\",", "= ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7) def test_golden_output_8(self): self.maxDiff", "parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_c3850 =", "{ \"0\": { \"WS-F6700-DFC3CXL\": { \"descr\": \"WS-F6700-DFC3CXL Distributed Forwarding Card", "10GE Rev. 
2.0\", \"pid\": \"WS-X6816-10GE\", \"vid\": \"V02\", \"sn\": \"SAL17152QB3\", \"subslot\":", "w/ CTS Rev. 1.5\" PID: VS-SUP2T-10G , VID: V05, SN:", "= Mock(**self.semi_empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(KeyError): parsed_output = version_obj.parse()", "reload license Level: ipservices cisco WS-C3750X-24S (PowerPC405) processor (revision A0)", "Module DRAM configuration is 72 bits wide with parity enabled.", "'image_type': 'production image', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'main_mem':", "If you require further assistance please contact us by sending", "RELEASE SOFTWARE (fc1) R5 uptime is 9 weeks, 4 days,", "AC Power Supply 1': { 'descr': 'C3900 AC Power Supply", "'vid': 'V02 ', }, 'PWR-C2-2929WAC': { 'descr': 'LLL Power Supply',", "sub-module of 2\", \"pid\": \"WS-F6K-DFC4-E\", \"sn\": \"SAL171846RF\", \"vid\": \"V02\", }", "None self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj =", "'IOSv', 'pid': 'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S', 'vid': '1.0', }, }, },", "chassis', 'pid': 'CISCO3845', 'sn': 'FTX6666ARJ9', 'vid': 'V05 ', }, },", "NAME: \"FAN-MOD-4HS 1\", DESCR: \"High Speed Fan Module for CISCO7604", "84, \"five_sec_cpu\": 0.55 }, 3: { \"invoked\": 116196, \"usecs\": 976,", "= Mock(**self.golden_output) platform_obj = ShowProcessesCpuHistory(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output)", "RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1\",", "'16': { 'lc': { 'NM-16ESW': { 'descr': '16 Port 10BaseT/100BaseTX", "1\", \"pid\": \"VS-F6K-PFC4\", \"sn\": \"SAL17163901\", \"vid\": \"V03\", }, }, \"4\":", "Clock FRU 2\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", }", "2 ports Supervisor Engine 720 Rev. 
5.6\", \"pid\": \"WS-SUP720-3BXL\", \"vid\":", "\"invoked\": 3321960, \"usecs\": 109, \"tty\": 0, \"one_min_cpu\": 0.54, \"process\": \"PIM", "'slot': { '0': { 'rp': { 'C3900-SPE150/K9': { 'descr': 'Cisco", "FOC98675W3E NAME: \"Virtual Private Network (VPN) Module on Slot 0\",", "obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(subslot='0/1') self.maxDiff = None self.assertEqual(parsed_output,", "Mock(**self.empty_output) inventory_obj = ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = inventory_obj.parse() def", "Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\\ Directory", "'28', 'Ten Gigabit Ethernet': '2' }, 'mem_size': { 'flash-simulated non-volatile", "10Gbase-SR Te2/6\" PID: X2-10GB-SR , VID: V06 , SN: FNS153920YJ", "{ 'rp': { 'C3900-SPE150/K9': { 'descr': 'Cisco Services Performance Engine", "'name': 'Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot", "Rev. 3.4\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V04\", \"sn\": \"SAL14017TWF\", \"subslot\": {", "CLK-7600 , VID: , SN: FXS170802GL NAME: \"1\", DESCR: \"VS-SUP2T-10G", "IOSv , VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2 = {", "4 Rev. 2.0\", \"name\": \"VS-F6K-PFC4 Policy Feature Card 4 EARL", "'securityk9', }, 'uc': { 'license_level': 'None', 'license_type': 'None', 'next_reload_license_level': 'None',", "responsible for compliance with U.S. and local country laws. 
By", "U (2091, 3086) on Slot 0 SubSlot 1', 'pid': 'WIC-1B-U-V2',", "CISCO3945-CHASSIS , VID: V05 , SN: FGL161010K8 NAME: \"Cisco Services", "FXS170802GL NAME: \"1\", DESCR: \"VS-SUP2T-10G 5 ports Supervisor Engine 2T", "= None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_golden_standby(self): self.device = Mock(**self.golden_output_standby) obj", "self.device1 = Mock(**self.empty_output) obj = ShowVersionRp(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "PWR-3900-AC , VID: V03 , SN: QCS1604P0BT '''} golden_parsed_output_5 =", "Version 15.6(3)M2, RELEASE SOFTWARE (fc2)\", \"uptime_in_curr_state\": \"1 day, 16 hours,", "1.0\", \"name\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of", "Mock(**self.empty_output) platform_obj = ShowPlatform(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def", "Slot 1 SubSlot 1\", DESCR: \"Six port FXO voice interface", "Mock(**self.golden_output_4) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_4) def", "Version = Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2,", "with Gigabit Ethernet on Slot 0', 'pid': 'CISCO3845-MB', 'sn': 'FOC729346GQ',", "\"X2 Transceiver 10Gbase-SR Te2/6\" PID: X2-10GB-SR , VID: V06 ,", "ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): def", "\"FAN-MOD-4HS 1\": { \"other\": { \"FAN-MOD-4HS 1\": { \"name\": \"FAN-MOD-4HS", "test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def test_golden_active_opm(self): self.device = Mock(**self.golden_output_active_opm) obj = 
ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output", "ATA CompactFlash 2 (Read/Write) 10080K bytes of ATA CompactFlash 3", "\"WS-C1010XR-48FPS-I\" PID: WS-C1010XR-48FPS-I, VID: V05 , SN: FD2043B0K3 NAME: \"Switch", "'descr': 'ABC Power Supply', 'name': 'Switch 1 - Power Supply", "\"rp\": { \"VS-SUP2T-10G\": { \"name\": \"1\", \"descr\": \"VS-SUP2T-10G 5 ports", "00:00:00 +00:00 vios-adventerprisek9-m 268 -rw- 524288 Oct 17 2018 18:57:10", "}, 'slot': { '0': { 'rp': { 'CISCO3825': { 'subslot':", "\"vid\": \"V01\", }, } }, } } }, \"2\": {", "WS-SUP720-3BXL , VID: V05, SN: SAL11434P2C NAME: \"msfc sub-module of", "self.device = Mock(**self.golden_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) parsed_output = platform_obj.parse(", "Board Revision Number : 0x04 Switch Ports Model SW Version", "{ 'data': { 'license_level': 'datak9', 'license_type': 'Permanent', 'next_reload_license_level': 'datak9', },", "DSPs on Slot 0 SubSlot 4\", DESCR: \"PVDMII DSP SIMM", "'vid': 'V05 ', }, }, }, '2': { 'rp': {", "{ \"invoked\": 1466728, \"usecs\": 2442, \"tty\": 0, \"one_min_cpu\": 0.87, \"process\":", "Supply 1\", DESCR: \"BCA Power Supply\" PID: C3KX-PWR-007CBA , VID:", "WS-C6504-E , VID: V01, SN: FXS1712Q1R8 NAME: \"CLK-7600 1\", DESCR:", "''' NAME: \"3845 chassis\", DESCR: \"3845 chassis\" PID: CISCO3845 ,", "NAME: \"Wan Interface Card BRI U (2091, 3086) on Slot", "= Mock(**self.golden_output_active_ipm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active',", "number : 73-13061-04 Motherboard serial number : FDO1633Q14M Model revision", "{ \"rp\": { \"WS-SUP720-3BXL\": { \"name\": \"1\", \"descr\": \"WS-SUP720-3BXL 2", "Ethernet': '28', 'Ten Gigabit Ethernet': '2', 'Virtual Ethernet': '2', 'Gigabit", "bytes of packet buffer memory. 
65536K bytes of Flash internal", "Part Number : 800-38990-01 Top Assembly Revision Number : F0", "version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self): self.maxDiff = None self.dev_iosv =", "= Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { \"red_sys_info\":", "'vid': 'V00 ', }, 'WS-C1010XR-48FPS-I': { 'descr': 'WS-C1010XR-48FPS-I', 'name': '1',", "{'execute.return_value': ''' NAME: \"3845 chassis\", DESCR: \"3845 chassis\" PID: CISCO3845", "IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2)\",", "Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D', 'vid': 'V01", "\"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS1707008Y\", } } }, \"PS 2", "''} golden_parsed_output_iosv = { 'main': { 'chassis': { 'IOSv': {", ", VID: , SN: FXS181101V4 NAME: \"CLK-7600 2\", DESCR: \"OSR-7600", "FOC16050QP6 Technology Package License Information for Module:'c3900' ----------------------------------------------------------------- Technology Technology-package", "\"hostname\": \"N95_1\", \"os\": \"IOS\", \"version_short\": \"15.6\", \"number_of_intfs\": { \"Gigabit Ethernet\":", "SN: SAL11434LYG NAME: \"2\", DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb", "'name': 'Clear/Subrate T3/E3 WAN on Slot 1', 'pid': 'NM-1T3/E3=', 'sn':", "PID: WS-C6503-E , VID: V03, SN: FXS1821Q2H9 NAME: \"CLK-7600 1\",", "NAME: \"2\", DESCR: \"WS-C3210X-48\" PID: WS-C3210X-48T-S , VID: V02 ,", "\"Transceiver Te2/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T5\", \"vid\": \"V06 \", }", "\"last_modified_date\": \"Mar 29 2017 00:00:00 +00:00\", \"index\": \"267\", \"size\": \"147988420\",", "{ \"descr\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 
1.0\", \"name\":", "}, }, }, } golden_output_9 = {'execute.return_value': ''' NAME: \"3845", "Transceiver 10Gbase-SR Te2/5\", \"name\": \"Transceiver Te2/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"AGA1515XZE2\",", "21:57:23 UTC Sat Aug 28 2010 (SP by power on)", "datak9 Permanent datak9 Configuration register is 0x2102 '''} parsed_output =", "= obj.parse(rp='active', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_golden_standby(self):", "Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)', 'hostname': 'sample_switch', 'uptime': '8 weeks,", "\"descr\": \"Cisco Systems Catalyst 6500 3-slot Chassis System\", \"pid\": \"WS-C6503-E\",", "V03, SN: FXS1821Q2H9 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600 Clock FRU", "self.golden_parsed_output_serdes_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdesInternal(device=self.device1) with", "'16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2', 'pid': 'NM-16ESW', 'sn':", "Te2/3\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202UU\", \"vid\": \"V06 \", } },", "Part Number : 800-33746-04 Top Assembly Revision Number : B0", "\"descr\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 
1.2\", \"name\": \"WS-F6K-DFC4-E", "\"ONT170202UU\", \"vid\": \"V06 \", } }, \"4\": { \"X2-10GB-SR\": {", "-rw- 119 Oct 17 2018 18:57:18 +00:00 e1000_bia.txt 2142715904 bytes", "Mock(**self.empty_output) obj = ShowPlatformHardwareSerdes(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class", "to ROM by power cycle at 21:57:23 UTC Sat Aug", "SN: FOC135464KO NAME: \"Gigabit(1000BaseT) module for EtherSwitch NM on Slot", "Transceiver 10Gbase-SR Te2/1\" PID: X2-10GB-SR , VID: V06 , SN:", "Cisco 3900 ISR\" PID: C3900-SPE150/K9 , VID: V05 , SN:", "976, \"tty\": 0, \"one_min_cpu\": 0.07, \"process\": \"OSPF-1 Hello\", \"five_min_cpu\": 0.07,", "Level: ipservices cisco WS-C3750X-24S (PowerPC405) processor (revision A0) with 524288K", "FRU 1\" PID: CLK-7600 , VID: , SN: FXS170802GL NAME:", "parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_serdes) def test_empty(self):", "'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)',", "\"other\": { \"CLK-7600 2\": { \"name\": \"CLK-7600 2\", \"descr\": \"OSR-7600", "\"permissions\": \"-rw-\" }, \"boot\": { \"last_modified_date\": \"Jan 30 2013 00:00:00", "', }, }, }, 'vid': 'V04 ', }, }, },", "DESCR: \"CISCO3945-CHASSIS\" PID: CISCO3945-CHASSIS , VID: V05 , SN: FGL161010K8", "\"High Speed Fan Module for CISCO7604 1\", \"pid\": \"FAN-MOD-4HS\", \"vid\":", "NAME: \"Switch 2 - Power Supply 1\", DESCR: \"BCA Power", "\"subslot\": { \"0\": { \"WS-F6K-DFC4-E\": { \"descr\": \"WS-F6K-DFC4-E Distributed Forwarding", "WS-F6700-CFC , VID: V06, SN: SAL13516QS8 NAME: \"FAN-MOD-4HS 1\", DESCR:", "72 bits wide with parity disabled. 
256K bytes of non-volatile", "test_golden_ios_2(self): self.maxDiff = None self.dev_iosv = Mock(**self.device_output) version_obj = ShowVersion(device=self.dev_iosv)", "'vid': 'V01 ', }, }, }, 'vid': 'V01 ', },", "ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_serdes_internal)", "'V02 ', }, 'PWR-C2-2929WAC': { 'descr': 'LLL Power Supply', 'name':", "parsered_output = version_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) version_obj =", "'ipservices', 'license_type': 'Permanent', 'next_reload_license_level': 'ipservices', 'chassis': 'WS-C3750X-24S', 'main_mem': '524288', 'processor_type':", "00:32 by prod_rel_team ROM: System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE", "Distributed Forwarding Card 4 EARL sub-module of 2\", DESCR: \"WS-F6K-DFC4-E", "'SM-ES2-16-P', 'name': '1', 'pid': 'SM-ES2-16-P', 'sn': 'FOC09876NP3', 'vid': '', },", "\"pid\": 412, \"five_sec_cpu\": 0.15 } }, \"five_sec_cpu_total\": 4, \"five_min_cpu\": 9,", "= obj.parse(key_word='process') self.assertEqual(parsed_output, self.golden_parsed_output_1) def test_empty(self): self.device1 = Mock(**self.empty_output) obj", "None self.assertEqual( parsed_output, self.golden_parsed_output_serdes_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj", "reason: Reload Command This product contains cryptographic features and is", "(PowerPC405) processor (revision A0) with 524288K bytes of memory. Processor", "'4': { 'PVDM2-64': { 'descr': 'PVDMII DSP SIMM with four", "{ \"last_modified_date\": \"Oct 17 2018 18:57:18 +00:00\", \"index\": \"269\", \"size\":", "memory. Processor board ID FDO1633Q14S Last reset from power-on 14", "C3900-SPE150/K9 , VID: V05 , SN: FOC16050QP6 NAME: \"Two-Port Fast", "revision number : W0 Motherboard revision number : B0 Model", "DESCR: \"VS-F6K-PFC4 Policy Feature Card 4 Rev. 
2.0\" PID: VS-F6K-PFC4", "test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj = ShowProcessesCpuHistory(device=self.device)", "voice interface daughtercard on Slot 1 SubSlot 1\", DESCR: \"Six", "}, }, }, }, } golden_output_9 = {'execute.return_value': ''' NAME:", "FXS170802GL NAME: \"CLK-7600 2\", DESCR: \"OSR-7600 Clock FRU 2\" PID:", "Daughterboard Rev. 3.1\", \"name\": \"msfc sub-module of 1\", \"pid\": \"WS-SUP720\",", "= slot 0 Current Software state = ACTIVE Uptime in", "'CISCO3945-CHASSIS': { 'descr': 'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS', 'sn': 'FGL161010K8',", "V05 , SN: FD2043B0K3 NAME: \"Switch 1 - Power Supply", "ShowProcessesCpuHistory(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff =", "06:26 by kellythw Image text-base: 0x40101040, data-base: 0x42D98000 ROM: System", "System CompactFlash 0 (Read/Write) License Info: License UDI: ------------------------------------------------- Device#", "1.0', 'name': 'IOSv', 'pid': 'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S', 'vid': '1.0', },", "', }, }, }, }, } golden_output_5 = {'execute.return_value': '''", "{ 'chassis': { 'IOSv': { 'descr': 'IOSv chassis, Hw Serial#:", "'pid': 'SFP-10G-LR', 'sn': 'ONT182746GZ', 'vid': 'V02 ', }, }, },", "{ \"name\": \"PS 1 PWR-2700-AC/4\", \"descr\": \"2700W AC power supply", "self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitch(device=self.dev_c3850) parsed_output", ", VID: V05 , SN: FGL161010K8 NAME: \"Cisco Services Performance", "EST Mon Dec 9 2019', 'uptime': '1 hour, 20 minutes',", "}, } golden_output_6 = {'execute.return_value': ''' NAME: \"1\", DESCR: \"SM-ES2-16-P\"", "SOFTWARE (fc1) sample_switch uptime is 8 weeks, 3 days, 10", "data datak9 Permanent datak9 Configuration register is 0x2102 '''} parsed_output", "ID FDO2028F1WK Last reset from power-on 2 Virtual 
Ethernet interfaces", "text-base: 0x40101040, data-base: 0x42D98000 ROM: System Bootstrap, Version 12.2(17r)S4, RELEASE", "= Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active',", "{ 'rp': { 'WS-C0123X-45T-S': { 'descr': 'WS-C8888X-88', 'name': '1', 'pid':", "\"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/5\", \"name\": \"Transceiver Te2/5\",", "'os': 'IOS', 'platform': 'C3750E', 'processor_type': 'PowerPC405', 'returned_to_rom_by': 'power-on', 'rom': 'Bootstrap", "ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6) def test_golden_output_7(self): self.maxDiff =", "'V09 ', }, }, 'other': { 'AIM-VPN/SSL-3': { 'descr': 'Encryption", "ID 9K66Z7TOKAACDEQA24N7S 6 Gigabit Ethernet interfaces DRAM configuration is 72", "Mon Dec 9 2019', 'uptime': '1 hour, 20 minutes', 'version':", "(c) 1986-2018 by Cisco Systems, Inc. 
Compiled Mon 22-Jan-18 04:07", "parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe):", "1', 'name': 'C3900 AC Power Supply 1', 'pid': 'PWR-3900-AC', 'sn':", "uptime = 0 minutes Switchovers system experienced = 0 Standby", "Supply 1\", DESCR: \"LLL Power Supply\" PID: PWR-C2-2929WAC , VID:", "Mon Dec 9 2019 System restarted at 10:27:57 EST Mon", "= ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe):", "C1010X-STACK , VID: V02 , SN: FD232323XXZ NAME: \"GigabitEthernet1/0/49\", DESCR:", "\"bytes_total\": \"2142715904\", \"bytes_free\": \"1989595136\" }, \"dir\": \"flash0:/\" } } golden_output_iosv", "Standby failures = 0 Last switchover reason = unsupported Hardware", "- Power Supply 1\", DESCR: \"ABC Power Supply\" PID: C3KX-PWR-350WAC", "{'execute.return_value':''' best-c3945-IOS3#show version Cisco IOS Software, C3900 Software (C3900-UNIVERSALK9-M), Version", "4\", DESCR: \"WS-F6700-CFC Centralized Forwarding Card Rev. 4.1\" PID: WS-F6700-CFC", "'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR', 'sn': 'SPC1519005V', 'vid': 'V03 ',", ", SN: ONT170202T1 NAME: \"Transceiver Te1/5\", DESCR: \"X2 Transceiver 10Gbase-SR", "import unittest from unittest.mock import Mock from pyats.topology import Device", "ShowPlatform(device=self.dev2) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden_c3850(self): self.maxDiff =", "- T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC65428K9F", "The password-recovery mechanism is enabled. 
512K bytes of flash-simulated non-volatile", "\"VS-F6K-PFC4\", \"sn\": \"SAL17163901\", \"vid\": \"V03\", }, }, \"4\": { \"X2-10GB-SR\":", "RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48", "Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c)", "', }, }, }, }, } golden_output_9 = {'execute.return_value': '''", "\"main\": { \"chassis\": { \"WS-C6503-E\": { \"name\": \"WS-C6503-E\", \"descr\": \"Cisco", "'''} golden_parsed_output_iosv = { \"dir\": { \"flash0:/\": { \"files\": {", "28 Gigabit Ethernet interfaces 2 Ten Gigabit Ethernet interfaces The", "'CISCO3845', 'sn': 'FTX6666ARJ9', 'vid': 'V05 ', }, }, }, 'slot':", "obj.parse(key_word='process') self.assertEqual(parsed_output, self.golden_parsed_output_1) def test_empty(self): self.device1 = Mock(**self.empty_output) obj =", "Mock(**self.golden_output_standby_offline) obj = ShowVersionRp(device=self.device) self.maxDiff = None with self.assertRaises(SchemaEmptyParserError): parsed_output", "SOFTWARE (fc1)', 'chassis': 'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK', 'curr_config_register': '0xF', 'compiled_by': 'prod_rel_team',", "2\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS17070093\", } } },", "test_golden_output_8(self): self.maxDiff = None self.device = Mock(**self.golden_output_8) obj = ShowInventory(device=self.device)", "- 8FXS/DID on Slot 1\", DESCR: \"High Density Voice Module", "self.assertRaises(SchemaEmptyParserError): parsed_output = redundancy_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv", "{ \"sort\": { 1: { \"invoked\": 3321960, \"usecs\": 109, \"tty\":", "= {'execute.return_value': ''' NAME: \"3845 chassis\", DESCR: \"3845 chassis\" PID:", "Information for Module:'c3900' ----------------------------------------------------------------- Technology Technology-package Technology-package Current Type Next", "23%; five minutes: 15% '''} golden_parsed_output_1 = { \"sort\": 
{", "\"name\": \"1\", \"descr\": \"WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev.", "def test_golden(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj =", ", VID: V02, SN: DCH183500KW NAME: \"PS 1 PWR-1400-AC\", DESCR:", "with 435457K/87040K bytes of memory. Processor board ID 9K66Z7TOKAACDEQA24N7S 6", "{ \"name\": \"1\", \"descr\": \"VS-SUP2T-10G 5 ports Supervisor Engine 2T", "\"SAL11434P2C\", \"subslot\": { \"0\": { \"WS-SUP720\": { \"descr\": \"WS-SUP720 MSFC3", "\", } }, }, } } }, \"2\": { \"lc\":", "\"WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0\" PID: WS-X6816-10GE ,", ", SN: FD5678Z90P NAME: \"Switch 2 - Power Supply 1\",", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase): dev = Device(name='ios')", "'license_level': 'datak9', 'license_type': 'Permanent', 'next_reload_license_level': 'datak9', }, 'ipbase': { 'license_level':", "def test_golden_subslot(self): self.device = Mock(**self.golden_output_subslot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output =", "Last reload reason: Reload Command This product contains cryptographic features", "self.golden_parsed_output_1) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowProcessesCpu(device=self.device1) with", "semi_empty_output = {'execute.return_value': '''\\ ROM: Bootstrap program is IOSv '''}", "obj.parse(status='active', slot='0') class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def test_golden_active_ipm(self): self.device = Mock(**self.golden_output_active_ipm) obj", "\"image_type\": \"production image\", \"rom\": \"System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE", "test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def test_golden_port(self): self.device = Mock(**self.golden_output_port) obj = ShowPlatformHardwarePlim(device=self.device) 
parsed_output", "} } }, \"FAN-MOD-4HS 1\": { \"other\": { \"FAN-MOD-4HS 1\":", "\"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 1.1\" PID: WS-F6700-DFC3CXL ,", "loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE", "SN: FOC09876NP3 '''} golden_parsed_output_6 = { 'slot': { '1': {", "'pid': 'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3', 'subslot': { '1': { 'C1010X-STACK': {", "with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def test_golden_port(self): self.device", "\"Unknown reason\", \"hostname\": \"N95_1\", \"os\": \"IOS\", \"version_short\": \"15.6\", \"number_of_intfs\": {", "IOS (tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)", "'sn': 'FOC85389QXB', 'vid': 'V03 ', }, }, }, 'vid': 'V04", "3086) on Slot 0 SubSlot 1', 'pid': 'WIC-1B-U-V2', 'sn': '10293847',", "------------------------------------------------- Device# PID SN ------------------------------------------------- *0 C3900-SPE150/K9 FOC16050QP6 Technology Package", "test_show_module_iosxe,\\ TestShowSwitch as test_show_switch_iosxe,\\ TestShowSwitchDetail as test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase): dev1", "with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden_c3850(self): self.maxDiff = None", "obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_serdes) def test_empty(self): self.device1 =", "information is not available because it is in 'DISABLED' state", "Card EARL sub-module of 4\", DESCR: \"WS-F6700-CFC Centralized Forwarding Card", "Image ------ ----- ----- ---------- ---------- * 1 30 WS-C3750X-24P", "'sn': 'FDO123R12W', 'subslot': { '1': { 'C3KX-PWR-350WAC': { 'descr': 'ABC", "obj = ShowPlatformSoftwareStatusControl(device=self.dev) with 
self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self):", "Version 3.0.0. Bridging software. TN3270 Emulation software. 1 Virtual Ethernet/IEEE", "self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_version_rp(test_show_version_rp_iosxe): def test_golden_active(self): self.device =", "\"Switch 1 - FlexStackPlus Module\", DESCR: \"Stacking Module\" PID: C1010X-STACK", "'Permanent', 'next_reload_license_level': 'securityk9', }, 'uc': { 'license_level': 'None', 'license_type': 'None',", "encryption. Importers, exporters, distributors and users are responsible for compliance", "is \"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\" Last reload type: Normal Reload Last reload reason:", "VID: V01, SN: SAL1214LAG5 NAME: \"WS-C6503-E-FAN 1\", DESCR: \"Enhanced 3-slot", "Cisco Systems, Inc. Compiled Fri 05-Aug-11 00:32 by prod_rel_team ROM:", "def test_golden_asr1k(self): self.maxDiff = None self.dev_asr1k = Mock(**self.golden_output_asr1k) platform_obj =", "C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_1 = { 'version':", "Card 4 EARL sub-module of 2\", \"pid\": \"WS-F6K-DFC4-E\", \"sn\": \"SAL171846RF\",", "}, }, 'vid': 'V05 ', }, }, }, '2': {", "}, \"slot\": { \"slot 0\": { \"image_ver\": \"Cisco IOS Software,", "\"WS-X6748-GE-TX\", \"vid\": \"V04\", \"sn\": \"SAL14017TWF\", \"subslot\": { \"0\": { \"WS-F6700-CFC\":", "hours, 3 minutes', 'version': '12.2(55)SE8', 'version_short': '12.2' } } golden_output_ios", "V02 , SN: FD5678Z90P NAME: \"Switch 2 - Power Supply", "ShowPlatformPower(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def test_empty(self):", "'pid': 'PVDM2-64', 'sn': 'FOC63358WSI', 'vid': 'V01 ', }, }, },", "'descr': 'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS', 'sn': 
'FGL161010K8', 'vid': 'V05", "= { 'version': { 'chassis': 'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8', 'compiled_by': 'prod_rel_team',", "}, }, '1': { 'WIC-1B-U-V2': { 'descr': 'Wan Interface Card", "\"number_of_intfs\": { \"Gigabit Ethernet\": \"6\" }, \"version\": \"15.6(3)M2\", \"rtr_type\": \"IOSv\",", "Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk", "watt 1\" PID: PWR-1400-AC , VID: V01, SN: ABC0830J127 '''}", "PWR-1400-AC , VID: V01, SN: ABC0830J127 '''} golden_output_4 = {'execute.return_value':", "', }, }, }, }, }, }, }, } golden_output_8", "= Mock(**self.empty_output) obj = ShowProcessesCpu(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse()", "ACTIVE Uptime in current state = 1 day, 16 hours,", "'10293847', 'vid': 'V01', }, }, '4': { 'PVDM2-64': { 'descr':", "Cache Last reset from s/w reset SuperLAT software (copyright 1990", "obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) def test_empty(self): self.device1", "by reload System image file is \"flash0:/vios-adventerprisek9-m\" Last reload reason:", "DESCR: \"Cisco Systems Catalyst 6500 3-slot Chassis System\" PID: WS-C6503-E", "'65536K', \"number_of_intfs\": { \"Gigabit Ethernet/IEEE 802.3\": \"50\", 'Virtual Ethernet/IEEE 802.3':", "= { \"five_sec_cpu_total\": 13, \"five_min_cpu\": 15, \"one_min_cpu\": 23, \"five_sec_cpu_interrupts\": 0", "self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdesInternal(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "}, \"vios-adventerprisek9-m\": { \"last_modified_date\": \"Mar 29 2017 00:00:00 +00:00\", \"index\":", "= ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def test_golden_ios_2(self): self.maxDiff", "hours, 3 minutes System returned to ROM by power-on System", 
"board ID FDO1633Q14S Last reset from power-on 14 Virtual Ethernet", "test_golden_ios_cat6k(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_cat6k) version_obj = ShowVersion(device=self.dev_iosv)", "}, 'vid': 'V02 ', }, }, }, }, } golden_output_5", "is C3750E boot loader', 'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version", "'chassis': { 'IOSv': { 'descr': 'IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S,", "Card 4 Rev. 1.0\" PID: WS-F6K-DFC4-A , VID: V04, SN:", "platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def test_golden(self): self.device = Mock(**self.golden_output)", "\"sn\": \"SAL1214LAG5\", \"vid\": \"V01\", } } }, } } },", "http://www.cisco.com/techsupport Copyright (c) 1986-2011 by Cisco Systems, Inc. Compiled Fri", "def test_golden_active_ipm(self): self.device = Mock(**self.golden_output_active_ipm) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output =", "by sending email to <EMAIL>. Cisco CISCO3945-CHASSIS (revision 1.1) with", "VID: V04 , SN: FOC65798TG8 NAME: \"Six port FXO voice", "VID: , SN: SAL17142D06 NAME: \"VS-F6K-PFC4 Policy Feature Card 4", "CONFIG_FILE variable = BOOTLDR variable = Configuration register is 0x2012", "'reload', 'rom': 'System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)', 'rtr_type':", "country laws. 
By using this product you agree to comply", "'V03 ', }, }, }, 'vid': 'V00 ', }, 'WS-C1010XR-48FPS-I':", "TestShowVersion(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value':", "data-base: 0x02800000 ROM: Bootstrap program is C3750E boot loader BOOTLDR:", "None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output = obj.parse()", "'SFP-10G-LR': { 'descr': 'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR', 'sn': 'ONT182746GZ',", "= obj.parse(rp='standby', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_standby) def test_golden_standby_offline(self):", "\"CISCO3945-CHASSIS\", DESCR: \"CISCO3945-CHASSIS\" PID: CISCO3945-CHASSIS , VID: V05 , SN:", "\"PS 1 PWR-1400-AC\", DESCR: \"AC power supply, 1400 watt 1\"", "number : FDO2028F1WK Top Assembly Part Number : 800-38990-01 Top", "10Gbase-SR Te2/16\" PID: X2-10GB-SR , VID: V06 , SN: ONT170201TT", "engine sub-module of 1\", DESCR: \"WS-F6K-PFC3BXL Policy Feature Card 3", "None self.device = Mock(**self.golden_output_2) obj = ShowInventory(device=self.device) parsed_output = obj.parse()", "{ \"PS 2 PWR-2700-AC/4\": { \"name\": \"PS 2 PWR-2700-AC/4\", \"descr\":", "Ethernet High Speed WAN Interface Card\" PID: HWIC-2FE , VID:", "parsed_output = obj.parse(port='0/0/0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_port) def test_golden_slot(self):", "Mock(**self.empty_output) obj = ShowPlatformHardwareSerdesInternal(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class", "SFP Rev. 
1.0\", \"pid\": \"WS-X6824-SFP\", \"vid\": \"V01\", \"sn\": \"SAL17152EG9\", \"subslot\":", "cat6k_tb1 switched to active is 10 weeks, 5 days, 5", "Aug 28 2010 (SP by power on) System image file", "{ \"0\": { \"WS-F6700-CFC\": { \"descr\": \"WS-F6700-CFC Centralized Forwarding Card", "Slot 2 SubSlot 0', 'pid': 'GE-DCARD-ESW', 'sn': 'FOC91864MNN', 'vid': 'V01", "DESCR: \"X2 Transceiver 10Gbase-SR Te2/3\" PID: X2-10GB-SR , VID: V06", "\"WS-C6503-E\", \"descr\": \"Cisco Systems Catalyst 6500 3-slot Chassis System\", \"pid\":", "'CISCO3845-MB', 'sn': 'FOC729346GQ', 'vid': 'V09 ', }, }, 'other': {", "parsed_output = platform_obj.parse() def test_golden_c3850(self): self.maxDiff = None self.dev_c3850 =", "ONT1702033D NAME: \"2\", DESCR: \"WS-X6816-10GE CEF720 16 port 10GE Rev.", "test_show_platform_hardware_plim_iosxe,\\ TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\ TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\ TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\\", "reason = unsupported Hardware Mode = Simplex Maintenance Mode =", "Supply\" PID: C3KX-PWR-007CBA , VID: V01L , SN: LTP13579L3R NAME:", "Card BRI U (2091, 3086) on Slot 0 SubSlot 1',", "FRU 1\" PID: CLK-7600 , VID: , SN: FXS181101V4 NAME:", "None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_golden_standby(self): self.device = Mock(**self.golden_output_standby) obj =", "1 FastEthernet interface 28 Gigabit Ethernet interfaces 2 Ten Gigabit", "Engine 720 Rev. 
5.6\" PID: WS-SUP720-3BXL , VID: V05, SN:", "DESCR: \"3845 chassis\" PID: CISCO3845 , VID: V05 , SN:", "PWR-2700-AC/4\", \"descr\": \"2700W AC power supply for CISCO7604 1\", \"pid\":", "09:56', 'hostname': 'R5', 'image_id': 'C3750E-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'power-on',", "\"SAL17152QB3\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-E\": { \"descr\": \"WS-F6K-DFC4-E Distributed", "}, \"PS 1 PWR-2700-AC/4\": { \"other\": { \"PS 1 PWR-2700-AC/4\":", "self.device = Mock(**self.golden_output_3) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,", "register is 0x2102 '''} parsed_output = { 'version': { 'chassis':", "0.07, \"process\": \"OSPF-1 Hello\", \"five_min_cpu\": 0.07, \"runtime\": 113457, \"pid\": 412,", "(R7000) processor (revision 1.4) with 983008K/65536K bytes of memory. Processor", "'platform': 'C3750E', 'version': '15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M', 'os': 'IOS', 'image_type': 'production", "= ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', slot='0',", "'''} def test_empty(self): self.dev1 = Mock(**self.empty_output) dir_obj = Dir(device=self.dev1) with", "26-Jun-13 09:56', 'hostname': 'R5', 'image_id': 'C3750E-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason':", "Power Supply 1': { 'descr': 'C3900 AC Power Supply 1',", "self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(rp='standby', status='running') def test_empty(self): self.device1 = Mock(**self.empty_output)", "Tue Sep 10 2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on', 'license_level': 'ipservices',", "2 PWR-2700-AC/4\": { \"other\": { \"PS 2 PWR-2700-AC/4\": { \"name\":", "150 for Cisco 3900 ISR\" PID: C3900-SPE150/K9 , VID: V05", "as 
test_show_platform_software_slot_active_monitor_Mem_iosxe,\\ TestShowPlatformHardware as test_show_platform_hardware_iosxe,\\ TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\\ TestShowPlatformHardwareQfpBqsOpmMapping as", "'compiled_date': 'Wed 29-Mar-17 14:05', \"processor_type\": \"revision 1.0\", \"platform\": \"IOSv\", \"image_type\":", "}, \"five_sec_cpu_total\": 4, \"five_min_cpu\": 9, \"one_min_cpu\": 4, \"nonzero_cpu_processes\": [ \"PIM", "(Read/Write) 10080K bytes of ATA CompactFlash 3 (Read/Write) Configuration register", "transfer and use. Delivery of Cisco cryptographic products does not", "TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\\ TestShowEnv as test_show_env_iosxe,\\ TestShowModule as test_show_module_iosxe,\\ TestShowSwitch", "CLEI Code Number : CMMPP00DRB Hardware Board Revision Number :", "ID FGL161010K8 2 FastEthernet interfaces 3 Gigabit Ethernet interfaces 1", "in 'DISABLED' state '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) redundancy_obj", ", VID: V03 , SN: SPC1519005V NAME: \"2\", DESCR: \"WS-C3210X-48\"", "\"Cisco Services Performance Engine 150 for Cisco 3900 ISR\" PID:", "License Information for Module:'c3900' ----------------------------------------------------------------- Technology Technology-package Technology-package Current Type", "\"V06 \", } }, \"2\": { \"X2-10GB-SR\": { \"descr\": \"X2", "SN: FOC65428K9F NAME: \"Wan Interface Card BRI U (2091, 3086)", "\"3\", \"descr\": \"WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0\",", "\"APS1707008Y\", } } }, \"PS 2 PWR-2700-AC/4\": { \"other\": {", "CISCO3845-MB , VID: V09 , SN: FOC729346GQ NAME: \"Virtual Private", "unable to comply with U.S. 
and local laws, return this", "{ \"0\": { \"WS-F6K-DFC4-E\": { \"descr\": \"WS-F6K-DFC4-E Distributed Forwarding Card", "= ShowPlatform(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_semi_empty(self): self.dev2", "self.assertRaises(SchemaEmptyParserError): parsered_output = dir_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) dir_obj", "not ready to show bootvar '''} def test_empty(self): self.dev =", "'QCS1604P0BT', 'vid': 'V03 ', }, }, }, }, } golden_output_6", "\"Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot 0\",", "05-Aug-11 00:32', 'curr_config_register': '0x2102', 'hostname': 'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M', 'image_type': 'production", "\"PS 1 PWR-2700-AC/4\", \"descr\": \"2700W AC power supply for CISCO7604", "= None self.device = Mock(**self.golden_output_2) obj = ShowInventory(device=self.device) parsed_output =", "obj = ShowVersionRp(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform(test_show_platform_iosxe):", "\"IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0\" PID: IOSv", "}, '2': { 'rp': { 'WS-C3210X-48T-S': { 'descr': 'WS-C3210X-48', 'name':", "0x2012 Standby not ready to show bootvar '''} def test_empty(self):", "'Ten Gigabit Ethernet': '2' }, 'mem_size': { 'flash-simulated non-volatile configuration':", "None self.device = Mock(**self.golden_output_4) obj = ShowInventory(device=self.device) parsed_output = obj.parse()", "obj.parse(rp='standby', status='running') def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowVersionRp(device=self.device1)", "PID: CISCO3845-MB , VID: V09 , SN: FOC729346GQ NAME: \"Virtual", "VID: V01 , SN: FOC135464KO NAME: \"Gigabit(1000BaseT) module for EtherSwitch", "Configuration register is 0x0'''} golden_parsed_output_ios = { 'version': {'bootldr': 'C3750E", "PID: PWR-2700-AC/4 , 
VID: V03, SN: APS1707008Y NAME: \"PS 2", "VID: V05 , SN: FD2043B0K3 NAME: \"Switch 1 - Power", "'lc': { 'SM-ES2-16-P': { 'descr': 'SM-ES2-16-P', 'name': '1', 'pid': 'SM-ES2-16-P',", "'other': { 'EVM-HD-8FXS/DID': { 'descr': 'High Density Voice Module -", "days, 5 hours, 16 minutes Time since cat6k_tb1 switched to", "state '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) redundancy_obj = ShowRedundancy(device=self.dev1)", "uptime is 1 day, 16 hours, 42 minutes System returned", "\"FXS1821Q2H9\", } } }, \"slot\": { \"CLK-7600 1\": { \"other\":", "cat6k_tb1 uptime is 10 weeks, 5 days, 5 hours, 16", "with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') def test_golden_active_ipm(self):", "parity enabled. 255K bytes of non-volatile configuration memory. 2000880K bytes", "'descr': 'Clear/Subrate T3/E3 WAN', 'name': 'Clear/Subrate T3/E3 WAN on Slot", "\"pid\": \"WS-F6K-DFC4-A\", \"sn\": \"SAL171848KL\", \"vid\": \"V04\", } } }, }", "'production image', 'compiled_date': 'Mon 22-Jan-18 04:07', 'compiled_by': 'prod_rel_team', 'rom': 'Bootstrap", "'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8', 'subslot': { '1': { 'EM-HDA-6FXO': { 'descr':", "}, }, }, 'slot': { '0': { 'other': { 'AIM-VPN/SSL-2':", "def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdes(device=self.device1) with self.assertRaises(SchemaEmptyParserError):", "= { 'slot': { '1': { 'lc': { 'SM-ES2-16-P': {", "0 SubSlot 0\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk", "voice interface daughtercard on Slot 1 SubSlot 1', 'pid': 'EM-HDA-6FXO',", "Runtime(ms) Invoked uSecs 5Sec 1Min 5Min TTY Process 368 362874", "PID: SFP-10G-SR , VID: V03 , SN: SPC1519005V NAME: \"2\",", "\"descr\": \"High Speed Fan Module for CISCO7604 1\", \"pid\": \"FAN-MOD-4HS\",", "0\", DESCR: \"Gigabit(1000BaseT) module for EtherSwitch NM\" PID: GE-DCARD-ESW ,", "= ShowPlatformHardwareSerdes(device=self.device) 
parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_serdes)", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5) def test_golden_output_6(self): self.maxDiff = None self.device =", "reason: Reload command This product contains cryptographic features and is", "def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwarePlim(device=self.device1) with self.assertRaises(SchemaEmptyParserError):", "10Gbase-SR Te2/1\" PID: X2-10GB-SR , VID: V06 , SN: ONT17020338", "{ 'GE-DCARD-ESW': { 'descr': 'Gigabit(1000BaseT) module for EtherSwitch NM', 'name':", "def test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj =", "PID: NM-1T3/E3= , VID: V01 , SN: FOC28476ADM NAME: \"16", "} }, \"bytes_total\": \"2142715904\", \"bytes_free\": \"1989595136\" }, \"dir\": \"flash0:/\" }", "', }, }, '1/1/1': { 'SFP-10G-SR': { 'descr': 'SFP-10GBase-SR', 'name':", "\"name\": \"CLK-7600 2\", \"descr\": \"OSR-7600 Clock FRU 2\", \"pid\": \"CLK-7600\",", "Technology-package Technology-package Current Type Next reboot ------------------------------------------------------------------ ipbase ipbasek9 Permanent", "device=self.device) parsed_output = platform_obj.parse(status='active') self.assertEqual(parsed_output, self.golden_parsed_output_active) class test_show_env(test_show_env_iosxe): def test_empty(self):", "- T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675U0D", "\"uptime\": \"10 weeks, 5 days, 5 hours, 16 minutes\", \"system_image\":", "e1000\", \"five_min_cpu\": 2.77, \"runtime\": 3582279, \"pid\": 84, \"five_sec_cpu\": 0.55 },", "cryptographic products does not imply third-party authority to import, export,", "NAME: \"VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1\",", "'returned_to_rom_at': '21:57:23 UTC Sat Aug 28 2010', 'returned_to_rom_by': 'power cycle',", "\"CLK-7600 2\": { \"name\": \"CLK-7600 2\", \"descr\": \"OSR-7600 Clock 
FRU", "\"FAN-MOD-4HS 1\", DESCR: \"High Speed Fan Module for CISCO7604 1\"", "DESCR: \"WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6\" PID:", "'''} parsed_output = { 'version': { 'chassis': 'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8',", ": 73-13061-04 Motherboard serial number : FDO1633Q14M Model revision number", "\"chassis_sn\": \"FXS1821Q2H9\", \"last_reload_reason\": \"s/w reset\", 'processor_board_flash': '65536K', \"number_of_intfs\": { \"Gigabit", "FRU 2\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", } }", "VID: V06 , SN: ONT170201TT NAME: \"3\", DESCR: \"WS-X6824-SFP CEF720", "\"vid\": \"V06 \", } }, \"5\": { \"X2-10GB-SR\": { \"descr\":", "port FXO voice interface daughtercard\" PID: EM-HDA-6FXO , VID: V03", "6500 4-slot Chassis System\", \"pid\": \"WS-C6504-E\", \"vid\": \"V01\", \"sn\": \"FXS1712Q1R8\",", ", SN: FGL161010K8 NAME: \"Cisco Services Performance Engine 150 for", "16 port 10GE Rev. 2.0\", \"pid\": \"WS-X6816-10GE\", \"vid\": \"V02\", \"sn\":", "'power-on', 'rom': 'Bootstrap program is C3750E boot loader', 'rtr_type': 'WS-C3750X-24P',", "CompactFlash 0 (Read/Write) License Info: License UDI: ------------------------------------------------- Device# PID", "test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowProcessesCpu(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "= Mock(**self.empty_output) platform_obj = ShowPlatform(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse()", "interfaces 1917K bytes of non-volatile configuration memory. 8192K bytes of", "Power Supply 1\", DESCR: \"LLL Power Supply\" PID: PWR-C2-2929WAC ,", "1.0\" PID: IOSv , VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2", "Engine 2T 10GE w/ CTS Rev. 
1.5\" PID: VS-SUP2T-10G ,", "\"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", } } }, \"CLK-7600 2\":", "', }, }, }, 'vid': 'V00 ', }, 'WS-C1010XR-48FPS-I': {", "{ \"WS-F6700-DFC3CXL\": { \"descr\": \"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev.", "\"WS-C6503-E-FAN 1\": { \"name\": \"WS-C6503-E-FAN 1\", \"descr\": \"Enhanced 3-slot Fan", "test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output", "V06, SN: SAL13516QS8 NAME: \"FAN-MOD-4HS 1\", DESCR: \"High Speed Fan", "TestShowPlatform as test_show_platform_iosxe,\\ TestShowPlatformPower as test_show_platform_power_iosxe,\\ TestShowVersionRp as test_show_version_rp_iosxe,\\ TestShowProcessesCpu", "empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { 'main': { 'chassis':", "self.golden_parsed_output) class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj =", "\"2700W AC power supply for CISCO7604 2\" PID: PWR-2700-AC/4 ,", "Configuration register is 0x2102 '''} parsed_output = { 'version': {", "\"AGA1515XZE2\", \"vid\": \"V05 \", } }, \"6\": { \"X2-10GB-SR\": {", "Rev. 
1.1\" PID: WS-F6700-DFC3CXL , VID: V01, SN: SAL1214LAG5 NAME:", "NAME: \"CLK-7600 2\", DESCR: \"OSR-7600 Clock FRU 2\" PID: CLK-7600", "Fan Tray 1\", \"pid\": \"WS-C6503-E-FAN\", \"vid\": \"V02\", \"sn\": \"DCH183500KW\", }", "self.dev = Mock(**self.golden_output) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse(key_word='CPU', sort_time='5min')", "= obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff", "SN: ONT170202T1 NAME: \"Transceiver Te1/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/5\"", "APS1707008Y NAME: \"PS 2 PWR-2700-AC/4\", DESCR: \"2700W AC power supply", "}, }, '16': { 'lc': { 'NM-16ESW': { 'descr': '16", "Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport", "Model number : WS-C3750X-24P-L Daughterboard assembly number : 800-32727-03 Daughterboard", "\"SAL17152N0F\", \"subslot\": { \"0\": { \"VS-F6K-MSFC5\": { \"descr\": \"VS-F6K-MSFC5 CPU", "ID FXS1821Q2H9 SR71000 CPU at 600Mhz, Implementation 0x504, Rev 1.2,", "}, }, } } }, \"2\": { \"lc\": { \"WS-X6816-10GE\":", "2018', 'uptime': '9 weeks, 4 days, 2 hours, 3 minutes',", "'version': {'version_short': '15.2', 'platform': 'C3750E', 'version': '15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M', 'os':", "Systems, Inc. Compiled Thu 23-Nov-06 06:26 by kellythw Image text-base:", "power cycle at 21:57:23 UTC Sat Aug 28 2010 (SP", "test_golden(self): self.device = Mock(**self.golden_output) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse()", "DESCR: \"IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0\" PID:", "with parity disabled. 256K bytes of non-volatile configuration memory. 
2097152K", "0xF '''} golden_parsed_output_ios_cat6k = { \"version\": { \"os\": \"IOS\", \"version_short\":", "version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def test_golden_ios_2(self):", "Top Assembly Revision Number : B0 Version ID : V03", "self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def test_empty(self): self.device =", "267 -rw- 147988420 Mar 29 2017 00:00:00 +00:00 vios-adventerprisek9-m 268", "configuration\": \"256\" }, \"system_image\": \"flash0:/vios-adventerprisek9-m\", \"curr_config_register\": \"0x0\", \"rom\": \"Bootstrap program", "hours, 42 minutes\", \"config_register\": \"0x0\", \"curr_sw_state\": \"ACTIVE\" } } }", "class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def test_golden_port(self): self.device = Mock(**self.golden_output_port) obj = ShowPlatformHardwarePlim(device=self.device)", "Te2/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"AGA1515XZE2\", \"vid\": \"V05 \", } },", "\"OSPF-1 Hello\" ], \"five_sec_cpu_interrupts\": 0 } golden_output_1 = {'execute.return_value': '''", "SN: FOC85389QXB '''} golden_parsed_output_8 = { 'main': { 'chassis': {", "boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE", "None self.assertEqual( parsed_output, self.golden_parsed_output_slot_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj", "ethernet MAC Address : 84:3D:C6:FF:F1:B8 Motherboard assembly number : 73-15476-04", "dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError): parsed_output = dir_obj.parse() def test_golden_iosv(self):", "cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = cpu_platform_obj.parse() class 
test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe):", "SchemaEmptyParserError,\\ SchemaMissingKeyError from genie.libs.parser.ios.show_platform import ShowVersion,\\ Dir,\\ ShowRedundancy,\\ ShowInventory,\\ ShowBootvar,", "}, 'ipbase': { 'license_level': 'ipbasek9', 'license_type': 'Permanent', 'next_reload_license_level': 'ipbasek9', },", "on Slot 0 SubSlot 3', 'pid': 'HWIC-2FE', 'sn': 'FOC16062824', 'vid':", "prod_rel_team ROM: System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1) best-c3945-IOS3", "}, \"4\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te1/4\",", "'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'next_reload_license_level': 'ipservices',", "'sn': 'FTX7908A3RQ', 'vid': 'V05 ', }, }, }, 'slot': {", "'Virtual Private Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-2',", "', }, }, 'other': { 'AIM-VPN/SSL-3': { 'descr': 'Encryption AIM", "NAME: \"Switch 1 - Power Supply 1\", DESCR: \"ABC Power", "A summary of U.S. 
laws governing Cisco cryptographic products may", "CLK-7600 , VID: , SN: FXS170802GL NAME: \"CLK-7600 2\", DESCR:", "class test_show_switch(test_show_switch_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitch(device=self.dev1)", "\"pid\": \"VS-SUP2T-10G\", \"vid\": \"V05\", \"sn\": \"SAL17152N0F\", \"subslot\": { \"0\": {", "\"nvram\": { \"last_modified_date\": \"Oct 17 2018 18:57:10 +00:00\", \"index\": \"268\",", "None self.dev_iosv = Mock(**self.golden_output_iosv) redundancy_obj = ShowRedundancy(device=self.dev_iosv) parsed_output = redundancy_obj.parse()", "obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse(key_word='CPU', sort_time='5min') self.assertEqual(parsed_output, self.golden_parsed_output) def", "\"Enhanced 3-slot Fan Tray 1\" PID: WS-C6503-E-FAN , VID: V02,", "self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardware(device=self.device1)", "NM-1T3/E3= , VID: V01 , SN: FOC28476ADM NAME: \"16 Port", "def test_golden_standby(self): self.device = Mock(**self.golden_output_standby) obj = ShowVersionRp(device=self.device) parsed_output =", "{ \"other\": { \"WS-C6503-E-FAN 1\": { \"name\": \"WS-C6503-E-FAN 1\", \"descr\":", "{ \"descr\": \"VS-F6K-MSFC5 CPU Daughterboard Rev. 
2.0\", \"name\": \"msfc sub-module", "golden_output_5 = {'execute.return_value': ''' best-c3945-IOS3#show inventory NAME: \"CISCO3945-CHASSIS\", DESCR: \"CISCO3945-CHASSIS\"", "\"WS-C8888X-88\" PID: WS-C0123X-45T-S , VID: V00 , SN: FDO123R12W NAME:", "self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_output_2(self): self.maxDiff = None self.device = Mock(**self.golden_output_2)", "'IOS', 'image_type': 'production image', 'compiled_date': 'Mon 22-Jan-18 04:07', 'compiled_by': 'prod_rel_team',", "self.dev1 = Mock(**self.semi_empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(KeyError): parsed_output =", "Model SW Version SW Image ------ ----- ----- ---------- ----------", "\"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\" Last reload type: Normal Reload Last reload reason: Reload", "', }, }, '2/1/1': { 'SFP-10G-LR': { 'descr': 'SFP-10GBase-LR', 'name':", "on Slot 2\", DESCR: \"16 Port 10BaseT/100BaseTX EtherSwitch\" PID: NM-16ESW", "Mock(**self.golden_output_c3850) platform_obj = ShowSwitch(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch_detail(test_show_switch_detail_iosxe):", "73-13061-04 Motherboard serial number : FDO1633Q14M Model revision number :", "= Mock(**self.golden_output_asr1k) platform_obj = ShowPlatform(device=self.dev_asr1k) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_asr1k)", "Assembly Part Number : 800-33746-04 Top Assembly Revision Number :", "Support: http://www.cisco.com/techsupport Copyright (c) 1986-2011 by Cisco Systems, Inc. 
Compiled", "SW Version SW Image ------ ----- ----- ---------- ---------- *", "PID SN ------------------------------------------------- *0 C3900-SPE150/K9 FOC16050QP6 Technology Package License Information", "ROM by power cycle at 21:57:23 UTC Sat Aug 28", "\"name\": \"4\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev.", "one minute: 23%; five minutes: 15% '''} golden_parsed_output_1 = {", "= redundancy_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv)", "distribute or use encryption. Importers, exporters, distributors and users are", "Current Processor Information : ------------------------------- Active Location = slot 0", "Rev. 1.5\", \"pid\": \"VS-SUP2T-10G\", \"vid\": \"V05\", \"sn\": \"SAL17152N0F\", \"subslot\": {", "'descr': 'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR', 'sn': 'ONT182746GZ', 'vid': 'V02", "five seconds: 13%/0%; one minute: 23%; five minutes: 15% '''}", "Supply 1', 'pid': 'PWR-C2-2929WAC', 'sn': 'LIT03728KKK', 'vid': 'V02L ', },", "10GE Rev. 2.0\" PID: WS-X6816-10GE , VID: V02, SN: SAL17152QB3", "data-base: 0x42D98000 ROM: System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)", "'V01 ', }, }, '1': { 'WIC-1B-U-V2': { 'descr': 'Wan", "Trunk - T1/E1 on Slot 0 SubSlot 1', 'pid': 'VWIC2-2MFT-T1/E1',", "'lc': { 'NM-1T3/E3=': { 'descr': 'Clear/Subrate T3/E3 WAN', 'name': 'Clear/Subrate", "with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(status='active') def test_golden_active(self): self.maxDiff = None", "\"SAL1128UPQ9\", \"subslot\": { \"0\": { \"WS-F6700-DFC3CXL\": { \"descr\": \"WS-F6700-DFC3CXL Distributed", "+00:00 nvram 269 -rw- 119 Oct 17 2018 18:57:18 +00:00", "test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowPlatformHardware(device=self.device) parsed_output = obj.parse()", "with U.S. and local laws, return this product immediately. 
A", "Forwarding Card 3 Rev. 1.1\" PID: WS-F6700-DFC3CXL , VID: V01,", "'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F', 'vid': 'V01 ', }, }, '1':", "'subslot': { '1': { 'C1010X-STACK': { 'descr': 'Stacking Module', 'name':", "\"WS-F6K-PFC3BXL\": { \"descr\": \"WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8\",", "1 - FlexStackPlus Module', 'pid': 'C1010X-STACK', 'sn': 'FD232323XXZ', 'vid': 'V02", "TestShowProcessesCpu as test_show_processes_cpu_iosxe,\\ TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\\ TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\\ TestShowPlatformSoftwareStatusControlProcessorBrief", "'C3900', 'processor_board_flash': '2000880K', 'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47 EST Mon Dec", "{ 'version': { 'chassis': 'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8', 'compiled_by': 'prod_rel_team', 'compiled_date':", "Cisco Systems, Inc. Compiled Wed 26-Jun-13 09:56 by prod_rel_team Image", "SR71000 CPU at 600Mhz, Implementation 0x504, Rev 1.2, 512KB L2", "DESCR: \"OSR-7600 Clock FRU 1\" PID: CLK-7600 , VID: ,", "'WS-C3750X-24S', 'main_mem': '524288', 'processor_type': 'PowerPC405', 'rtr_type': 'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S', 'number_of_intfs':", "'pid': 'WIC-1B-U-V2', 'sn': '10293847', 'vid': 'V01', }, }, '4': {", "} def test_empty(self): self.dev1 = Mock(**self.empty_output) inventory_obj = ShowInventory(device=self.dev1) with", "class test_dir(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output =", "SubSlot 3', 'pid': 'HWIC-2FE', 'sn': 'FOC16062824', 'vid': 'V02 ', },", "\"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 
3.4\", \"pid\": \"WS-X6748-GE-TX\",", "= Mock(**self.golden_output) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse() self.maxDiff =", "status='active', slot='0', iotype='ipm') def test_golden_active_ipm(self): self.maxDiff = None self.device =", "FTX6666ARJ9 NAME: \"c3845 Motherboard with Gigabit Ethernet on Slot 0\",", "for Cisco 3900 ISR\" PID: C3900-SPE150/K9 , VID: V05 ,", "'FD5678Z90P', 'subslot': { '2': { 'C3KX-PWR-007CBA': { 'descr': 'BCA Power", "0x504, Rev 1.2, 512KB L2 Cache Last reset from s/w", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8) def test_golden_output_9(self): self.maxDiff = None self.device =", "} golden_output_iosv = {'execute.return_value': '''\\ Redundant System Information : ------------------------------", "on Slot 2 SubSlot 0', 'pid': 'GE-DCARD-ESW', 'sn': 'FOC91864MNN', 'vid':", "using this product you agree to comply with applicable laws", "}, }, }, }, } def test_empty(self): self.dev1 = Mock(**self.empty_output)", "1', 'pid': 'PWR-3900-AC', 'sn': 'QCS1604P0BT', 'vid': 'V03 ', }, },", "= {'execute.return_value': '''\\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version", "Level: ipservices cisco WS-C3750X-24P (PowerPC405) processor (revision W0) with 262144K", "\"sn\": \"ONT170202UU\", \"vid\": \"V06 \", } }, \"4\": { \"X2-10GB-SR\":", "1 SubSlot 1', 'pid': 'EM-HDA-6FXO', 'sn': 'FOC85389QXB', 'vid': 'V03 ',", "}, } golden_output_7 = {'execute.return_value': ''' NAME: \"2821 chassis\", DESCR:", "{ \"main\": { \"chassis\": { \"WS-C6503-E\": { \"name\": \"WS-C6503-E\", \"descr\":", "golden_output_ios_cat6k = {'execute.return_value': ''' show version Cisco Internetwork Operating System", "00:32', 'curr_config_register': '0x2102', 'hostname': 'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M', 'image_type': 'production image',", "image\", 'processor_board_flash': '10080K', 'returned_to_rom_by': 'reload', \"main_mem\": \"435457\", \"mem_size\": { \"non-volatile", 
"Interface Card BRI U (2091, 3086)\" PID: WIC-1B-U-V2 , VID:", "'1 hour, 20 minutes', 'version': '15.0(1)M7', 'version_short': '15.0', }, }", "CPU at 600Mhz, Implementation 0x504, Rev 1.2, 512KB L2 Cache", "self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_c3850", "= Mock(**self.golden_output_ios_cat6k) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k)", "'vid': 'V01 ', }, }, }, }, }, }, '1':", "= ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_4) def test_golden_output_5(self): self.maxDiff", "golden_parsed_output_iosv = { \"version\": { \"last_reload_reason\": \"Unknown reason\", \"hostname\": \"N95_1\",", "parsed_output = obj.parse() class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def test_golden_port(self): self.device = Mock(**self.golden_output_port)", "Daughterboard Rev. 3.1\" PID: WS-SUP720 , VID: , SN: SAL11434N9G", "Mock(**self.golden_output) platform_obj = ShowProcessesCpuHistory(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class", "parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch_detail(test_show_switch_detail_iosxe): def test_empty(self): self.dev1 =", "Feature Card 4 Rev. 
2.0\", \"name\": \"VS-F6K-PFC4 Policy Feature Card", "'V01L ', }, }, '2/1/1': { 'SFP-10G-LR': { 'descr': 'SFP-10GBase-LR',", "Mock(**self.golden_output_iosv) platform_obj = ShowBootvar(device=self.dev_iosv) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class", "- T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk -", "test_golden_output_3(self): self.maxDiff = None self.device = Mock(**self.golden_output_3) obj = ShowInventory(device=self.device)", "0x00003000, data-base: 0x02800000 ROM: Bootstrap program is C3750E boot loader", "to United States and local country laws governing import, export,", "= ShowVersionRp(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform(test_show_platform_iosxe): def", "Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-2', 'sn': 'FOC2837465E',", "test_golden_subslot(self): self.device = Mock(**self.golden_output_subslot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(subslot='0/1')", "1400 watt 1\" PID: PWR-1400-AC , VID: V01, SN: ABC0830J127", "= Mock(**self.golden_output_ios_1) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1)", "{ '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48", "\"2\", \"descr\": \"WS-X6816-10GE CEF720 16 port 10GE Rev. 
2.0\", \"pid\":", "0x42D98000 ROM: System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1) BOOTLDR:", "2.0\", \"pid\": \"WS-X6816-10GE\", \"vid\": \"V02\", \"sn\": \"SAL17152QB3\", \"subslot\": { \"0\":", "PID: WS-X6748-GE-TX , VID: V02, SN: SAL1128UPQ9 NAME: \"switching engine", "10Gbase-SR Te2/2\" PID: X2-10GB-SR , VID: V06 , SN: ONT1702020H", "'WIC-1B-U-V2': { 'descr': 'Wan Interface Card BRI U (2091, 3086)',", "\"subslot\": { \"0\": { \"WS-F6K-DFC4-A\": { \"descr\": \"WS-F6K-DFC4-A Distributed Forwarding", "{ \"descr\": \"X2 Transceiver 10Gbase-SR Te2/3\", \"name\": \"Transceiver Te2/3\", \"pid\":", "}, }, } golden_output_3 = {'execute.return_value': ''' # show inventory", "'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'main_mem': '262144', 'mem_size': {'flash-simulated non-volatile", "'ipbasek9', }, 'security': { 'license_level': 'securityk9', 'license_type': 'Permanent', 'next_reload_license_level': 'securityk9',", "System\", \"pid\": \"WS-C6503-E\", \"vid\": \"V03\", \"sn\": \"FXS1821Q2H9\", } } },", "\"msfc sub-module of 1\", DESCR: \"VS-F6K-MSFC5 CPU Daughterboard Rev. 2.0\"", "ports Supervisor Engine 720 Rev. 
5.6\", \"pid\": \"WS-SUP720-3BXL\", \"vid\": \"V05\",", "ACW102938VS '''} golden_parsed_output_4 = { 'slot': { '1': { 'rp':", "is C3750E boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version", "'BCA Power Supply', 'name': 'Switch 2 - Power Supply 1',", "{'execute.return_value': '''\\ Redundant System Information : ------------------------------ Available system uptime", "test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def test_golden(self): self.device = Mock(**self.golden_output_serdes_internal) obj = ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output", "platform_obj.parse( status='active', slot='0', iotype='opm') self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def test_empty(self):", "{ \"name\": \"CLK-7600 2\", \"descr\": \"OSR-7600 Clock FRU 2\", \"pid\":", "\"WS-F6700-CFC Centralized Forwarding Card Rev. 
4.1\" PID: WS-F6700-CFC , VID:", "def test_golden(self): self.device = Mock(**self.golden_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device) parsed_output =", "on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D', 'vid':", "ipservices cisco WS-C3750X-24S (PowerPC405) processor (revision A0) with 524288K bytes", "NAME: \"Transceiver Te2/4\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/4\" PID: X2-10GB-SR", "(VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-2', 'sn': 'FOC2837465E', 'vid':", "\"boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\", \"configuration_register\": \"0x2012\" }, \"next_reload_boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\" } golden_output_iosv =", "'other': { 'C3900 AC Power Supply 1': { 'descr': 'C3900", "\"vid\": \"V04\", \"sn\": \"SAL14017TWF\", \"subslot\": { \"0\": { \"WS-F6700-CFC\": {", "PID: VS-SUP2T-10G , VID: V05, SN: SAL17152N0F NAME: \"msfc sub-module", "FOC729346GQ NAME: \"Virtual Private Network (VPN) Module on Slot 0\",", "FNS153920YJ NAME: \"Transceiver Te2/16\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/16\" PID:", "for Cisco 3900 ISR on Slot 0\", DESCR: \"Cisco Services", "8FXS/DID', 'name': 'High Density Voice Module - 8FXS/DID on Slot", "= platform_obj.parse() def test_golden_c3850(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850)", "\"Transceiver Te2/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/5\" PID: X2-10GB-SR ,", "AC Power Supply 1\" PID: PWR-3900-AC , VID: V03 ,", "as test_show_env_iosxe,\\ TestShowModule as test_show_module_iosxe,\\ TestShowSwitch as test_show_switch_iosxe,\\ TestShowSwitchDetail as", "VID: V02 , SN: FD5678Z90P NAME: \"Switch 2 - Power", ", VID: V02L , SN: LIT03728KKK NAME: \"Switch 1 -", "Version 12.2(55)SE8, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c)", "'pid': 'GE-DCARD-ESW', 'sn': 'FOC91864MNN', 'vid': 'V01 ', }, }, },", "None 
self.device = Mock(**self.golden_output) platform_obj = ShowPlatformPower(device=self.device) parsed_output = platform_obj.parse()", "DESCR: \"2700W AC power supply for CISCO7604 1\" PID: PWR-2700-AC/4", "with 983008K/65536K bytes of memory. Processor board ID FXS1821Q2H9 SR71000", "VID: V05 , SN: FGL161010K8 NAME: \"Cisco Services Performance Engine", "\"vid\": \"V03\", \"sn\": \"FXS1821Q2H9\", } } }, \"slot\": { \"CLK-7600", "slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) def test_empty(self): self.device1 =", "= Mock(**self.golden_output_1) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1)", "number : W0 Motherboard revision number : B0 Model number", "Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)\", \"hostname\": \"cat6k_tb1\", \"uptime\": \"10 weeks,", "2018 18:57:10 +00:00 nvram 269 -rw- 119 Oct 17 2018", "} }, \"5\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR", "self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output =", "} golden_output_iosv = {'execute.return_value': '''\\ Cisco IOS Software, IOSv Software", "'V01', }, }, '4': { 'PVDM2-64': { 'descr': 'PVDMII DSP", "'prod_rel_team', 'compiled_date': 'Wed 26-Jun-13 09:56', 'hostname': 'R5', 'image_id': 'C3750E-UNIVERSALK9-M', 'image_type':", "* 1 30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M Configuration register is 0xF", "test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='active',", "'platform': 'C3900', 'processor_board_flash': '2000880K', 'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47 EST Mon", "obj.parse(slot='0', internal=True) self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_slot_internal) def 
test_empty(self):", ", VID: V02 , SN: FD232323XXZ NAME: \"GigabitEthernet1/0/49\", DESCR: \"1000BaseSX", "}, }, }, '1': { 'other': { 'EVM-HD-8FXS/DID': { 'descr':", "18:57:10 +00:00\", \"index\": \"268\", \"size\": \"524288\", \"permissions\": \"-rw-\" }, \"boot\":", "with with 435457K/87040K bytes of memory. Processor board ID 9K66Z7TOKAACDEQA24N7S", "\"WS-F6700-DFC3CXL\": { \"descr\": \"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 1.1\",", "for CISCO7604 2\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS17070093\", }", "ONT170201TT NAME: \"3\", DESCR: \"WS-X6824-SFP CEF720 24 port 1000mb SFP", "PWR-2700-AC/4\", DESCR: \"2700W AC power supply for CISCO7604 2\" PID:", "}, \"5\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te1/5\",", "1986-2017 by Cisco Systems, Inc. Compiled Wed 29-Mar-17 14:05 by", "= None self.dev_iosv = Mock(**self.golden_output_iosv) version_obj = ShowVersion(device=self.dev_iosv) parsed_output =", "\"X2-10GB-SR\", \"sn\": \"ONT17020338\", \"vid\": \"V06 \", } }, \"2\": {", "\"communications\": \"Down\", \"hw_mode\": \"Simplex\", \"communications_reason\": \"Failure\", \"standby_failures\": \"0\" }, \"slot\":", "self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitchDetail(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "Density Voice Module - 8FXS/DID on Slot 1\", DESCR: \"High", "Motherboard revision number : B0 Model number : WS-C3750X-24P-L Daughterboard", "\"vid\": \"V05\", \"sn\": \"SAL17152N0F\", \"subslot\": { \"0\": { \"VS-F6K-MSFC5\": {", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_empty(self): self.device1 = Mock(**self.empty_output)", "'WS-C0123X-45T-S', 'sn': 'FDO123R12W', 'subslot': { '1': { 'C3KX-PWR-350WAC': { 'descr':", "NM on Slot 2 SubSlot 0', 'pid': 'GE-DCARD-ESW', 'sn': 'FOC91864MNN',", "Mock(**self.golden_output_standby) obj = ShowVersionRp(device=self.device) parsed_output = 
obj.parse(rp='standby', status='running') self.maxDiff =", "'C3900-SPE150/K9': { 'descr': 'Cisco Services Performance Engine 150 for Cisco", "configuration memory. 2000880K bytes of ATA System CompactFlash 0 (Read/Write)", "} } }, }, } golden_output_3 = {'execute.return_value': ''' #", "License Level: ipservices License Type: Permanent Next reload license Level:", "Assembly Part Number : 800-38990-01 Top Assembly Revision Number :", "= Mock(**self.empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsered_output = dir_obj.parse()", "'chassis': 'WS-C3750X-24S', 'main_mem': '524288', 'processor_type': 'PowerPC405', 'rtr_type': 'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S',", "Catalyst 6500 3-slot Chassis System\" PID: WS-C6503-E , VID: V03,", "'chassis': { 'CISCO3945-CHASSIS': { 'descr': 'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS',", "10 2018 System image file is \"flash:c3750e-universalk9-mz\" This product contains", "DESCR: \"WS-X6816-10GE CEF720 16 port 10GE Rev. 
2.0\" PID: WS-X6816-10GE", "SN: FXS181101V4 NAME: \"CLK-7600 2\", DESCR: \"OSR-7600 Clock FRU 2\"", "SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2011 by Cisco", "FRU 2\" PID: CLK-7600 , VID: , SN: FXS170802GL NAME:", "\"vid\": \"\", }, \"WS-F6K-PFC3BXL\": { \"descr\": \"WS-F6K-PFC3BXL Policy Feature Card", "'other': { 'AIM-VPN/SSL-3': { 'descr': 'Encryption AIM Element', 'name': 'Virtual", "\"PS 1 PWR-1400-AC\": { \"name\": \"PS 1 PWR-1400-AC\", \"descr\": \"AC", "def test_empty(self): self.dev1 = Mock(**self.empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(AttributeError):", "\"one_min_cpu\": 0.54, \"process\": \"PIM Process\", \"five_min_cpu\": 0.48, \"runtime\": 362874, \"pid\":", "Slot 1\", DESCR: \"High Density Voice Module - 8FXS/DID\" PID:", "'descr': '3845 chassis', 'name': '3845 chassis', 'pid': 'CISCO3845', 'sn': 'FTX6666ARJ9',", "\"vid\": \"V01\", \"sn\": \"SAL17152EG9\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-A\": {", "'FOC729346GQ', 'vid': 'V09 ', }, }, 'other': { 'AIM-VPN/SSL-3': {", "self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output", "}, 'C3900 AC Power Supply 1': { 'other': { 'C3900", "EM-HDA-6FXO , VID: V03 , SN: FOC85389QXB '''} golden_parsed_output_8 =", "\"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", } } }, \"1\": {", "'''\\ BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable = BOOTLDR variable", "RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2013 by", "\"slot 0\": { \"image_ver\": \"Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M),", "PID: CISCO3845 , VID: V05 , SN: FTX6666ARJ9 NAME: \"c3845", "Oct 14 2013 00:00:00 +00:00 config 267 -rw- 147988420 Mar", "Revision Number : B0 Version ID : V03 CLEI Code", "= platform_obj.parse() 
self.assertEqual(parsed_output, self.golden_parsed_output_asr1k) class test_show_platform_power(test_show_platform_power_iosxe): def test_empty(self): self.device =", "10293847 NAME: \"PVDMII DSP SIMM with four DSPs on Slot", "'3825 chassis', 'name': '3825 chassis', 'pid': 'CISCO3825', 'sn': 'FTX7908A3RQ', 'vid':", "ShowModule(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch(test_show_switch_iosxe): def test_empty(self): self.dev1", "\"size\": \"147988420\", \"permissions\": \"-rw-\" } }, \"bytes_total\": \"2142715904\", \"bytes_free\": \"1989595136\"", "interfaces DRAM configuration is 72 bits wide with parity disabled.", "ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_4) def test_golden_output_5(self): self.maxDiff =", "obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse(key_word='process') self.assertEqual(parsed_output, self.golden_parsed_output_1) def test_empty(self):", "'PVDMII DSP SIMM with four DSPs', 'name': 'PVDMII DSP SIMM", "} }, \"slot\": { \"CLK-7600 1\": { \"other\": { \"CLK-7600", "Sat Aug 28 2010', 'returned_to_rom_by': 'power cycle', \"rtr_type\": \"WS-C6503-E\", \"chassis_sn\":", "'descr': 'WS-C1010XR-48FPS-I', 'name': '1', 'pid': 'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3', 'subslot': {", "'subslot': { '2': { 'C3KX-PWR-007CBA': { 'descr': 'BCA Power Supply',", "None self.dev_iosv = Mock(**self.golden_output_ios) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse()", "U.S. and local laws, return this product immediately. 
A summary", "empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\\ Directory of", "'12:22:21 PDT Mon Sep 10 2018', 'uptime': '9 weeks, 4", "Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2)\", \"uptime_in_curr_state\": \"1 day,", "'WS-C1010XR-48FPS-I': { 'descr': 'WS-C1010XR-48FPS-I', 'name': '1', 'pid': 'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3',", "2442 0.55% 0.87% 2.77% 0 IOSv e1000 412 113457 116196", "255K bytes of non-volatile configuration memory. 2000880K bytes of ATA", "'FastEthernet': '2', 'Gigabit Ethernet': '3', }, 'os': 'IOS', 'platform': 'C3900',", "}, }, } golden_output_9 = {'execute.return_value': ''' NAME: \"3845 chassis\",", "= ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_slot)", "NAME: \"Transceiver Te2/16\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/16\" PID: X2-10GB-SR", "'CISCO3825': { 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8) def test_golden_output_9(self): self.maxDiff = None", "sub-module of 1\", \"pid\": \"VS-F6K-PFC4\", \"sn\": \"SAL17163901\", \"vid\": \"V03\", },", "| inc CPU CPU utilization for five seconds: 13%/0%; one", "'FOC65428K9F', 'vid': 'V01 ', }, }, '1': { 'WIC-1B-U-V2': {", "image', 'compiled_date': 'Mon 22-Jan-18 04:07', 'compiled_by': 'prod_rel_team', 'rom': 'Bootstrap program", "Te2/1\", \"name\": \"Transceiver Te2/1\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT17020338\", \"vid\": \"V06", "by cisco Systems, Inc. Compiled Thu 23-Nov-06 06:26 by kellythw", "'rtr_type': 'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21 PDT Mon Sep 10", "(copyright 1990 by Meridian Technology Corp). 
X.25 software, Version 3.0.0.", "VID: V05, SN: SAL11434P2C NAME: \"msfc sub-module of 1\", DESCR:", "1 hour, 20 minutes System returned to ROM by reload", "bytes of flash-simulated non-volatile configuration memory. Base ethernet MAC Address", "TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\\ TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\\ TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap", "Bootstrap program is IOSv '''} golden_parsed_output_iosv = { \"version\": {", "\"vid\": \"V02\", \"sn\": \"SAL1128UPQ9\", \"subslot\": { \"0\": { \"WS-F6700-DFC3CXL\": {", "\"4\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/4\", \"name\":", "you agree to comply with applicable laws and regulations. If", "NAME: \"WS-C6503-E\", DESCR: \"Cisco Systems Catalyst 6500 3-slot Chassis System\"", "Interface Card on Slot 0 SubSlot 3\", DESCR: \"Two-Port Fast", "(revision A0) with 524288K bytes of memory. Processor board ID", "}, '16': { 'lc': { 'NM-16ESW': { 'descr': '16 Port", "product immediately. A summary of U.S. laws governing Cisco cryptographic", "Daughterboard Rev. 
2.0\", \"name\": \"msfc sub-module of 1\", \"pid\": \"VS-F6K-MSFC5\",", "test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def test_golden_active_ipm(self): self.device = Mock(**self.golden_output_active_ipm) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output", ": 800-32727-03 Daughterboard serial number : FDO202823P8 System serial number", "30 2013 00:00:00 +00:00\", \"index\": \"1\", \"size\": \"0\", \"permissions\": \"drw-\"", "\"R7000\", 'sp_by': 'power on', 'returned_to_rom_at': '21:57:23 UTC Sat Aug 28", "EARL sub-module of 2\", \"pid\": \"WS-F6K-DFC4-E\", \"sn\": \"SAL171846RF\", \"vid\": \"V02\",", "0\": { \"image_ver\": \"Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version", "802.3 interfaces 1917K bytes of non-volatile configuration memory. 8192K bytes", "VID: V03 , SN: SPC1519005V NAME: \"2\", DESCR: \"WS-C3210X-48\" PID:", "\"10 weeks, 5 days, 5 hours, 16 minutes\", \"system_image\": \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\",", "3-slot Fan Tray 1\", \"pid\": \"WS-C6503-E-FAN\", \"vid\": \"V02\", \"sn\": \"DCH183500KW\",", "test_golden_slot_internal(self): self.device = Mock(**self.golden_output_slot_internal) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0',", "\"unsupported\", \"maint_mode\": \"Disabled\", \"switchovers_system_experienced\": \"0\", \"available_system_uptime\": \"0 minutes\", \"communications\": \"Down\",", "EARL sub-module of 2\", DESCR: \"WS-F6K-DFC4-E Distributed Forwarding Card 4", "platform_obj = ShowModule(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch(test_show_switch_iosxe): def", "{ 'CISCO3845': { 'descr': '3845 chassis', 'name': '3845 chassis', 'pid':", "VID: V01 , SN: FOC91864MNN '''} golden_parsed_output_9 = { 'main':", "\"name\": \"Transceiver Te2/2\", \"pid\": \"X2-10GB-SR\", \"sn\": 
\"ONT1702020H\", \"vid\": \"V06 \",", "{ 'lc': { 'NM-16ESW': { 'descr': '16 Port 10BaseT/100BaseTX EtherSwitch',", "\"curr_config_register\": \"0x2102\", } } golden_output_ios_cat6k = {'execute.return_value': ''' show version", "\"Switch 1 - Power Supply 1\", DESCR: \"LLL Power Supply\"", "'pid': 'PWR-3900-AC', 'sn': 'QCS1604P0BT', 'vid': 'V03 ', }, }, },", "def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowVersionRp(device=self.device1) with self.assertRaises(SchemaEmptyParserError):", "class test_show_processes_cpu(test_show_processes_cpu_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) obj = ShowProcessesCpu(device=self.device)", "\"WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6\", \"pid\": \"WS-SUP720-3BXL\",", "parsed_output = inventory_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv =", "Distributed Forwarding Card 4 Rev. 1.0\" PID: WS-F6K-DFC4-A , VID:", "to <EMAIL>. License Level: ipservices License Type: Permanent Next reload", "Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1) best-c3945-IOS3 uptime is 1", "Copyright (c) 1986-2017 by Cisco Systems, Inc. 
Compiled Wed 29-Mar-17", "2 (Read/Write) 10080K bytes of ATA CompactFlash 3 (Read/Write) Configuration", "test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output", "------------------------------- Active Location = slot 0 Current Software state =", "}, \"CLK-7600 2\": { \"other\": { \"CLK-7600 2\": { \"name\":", "V01, SN: DCH170900PF NAME: \"PS 1 PWR-2700-AC/4\", DESCR: \"2700W AC", "'0': { 'GE-DCARD-ESW': { 'descr': 'Gigabit(1000BaseT) module for EtherSwitch NM',", "= Mock(**self.empty_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse()", "'vid': 'V02 ', }, }, }, 'vid': 'V02 ', },", "SN: APS17070093 '''} golden_parsed_output_3 = { \"main\": { \"chassis\": {", "Mock(**self.golden_output_active) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) parsed_output = platform_obj.parse(status='active') self.assertEqual(parsed_output, self.golden_parsed_output_active)", "def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) with", "four DSPs', 'name': 'PVDMII DSP SIMM with four DSPs on", "- 2-Port RJ-48 Multiflex Trunk - T1/E1\" PID: VWIC2-2MFT-T1/E1 ,", "\"0\", \"permissions\": \"drw-\" }, \"nvram\": { \"last_modified_date\": \"Oct 17 2018", "'C3900 AC Power Supply 1': { 'other': { 'C3900 AC", "'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F', 'vid': 'V01 ', }, }, '1': {", "00:00:00 +00:00\", \"index\": \"264\", \"size\": \"0\", \"permissions\": \"drw-\" }, \"nvram\":", "require further assistance please contact us by sending email to", "= ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2) def test_golden_output_3(self): self.maxDiff", "2.6\" PID: WS-X6748-GE-TX , VID: V02, SN: SAL1128UPQ9 NAME: \"switching", "} 
}, \"2\": { \"lc\": { \"WS-X6816-10GE\": { \"name\": \"2\",", "', }, }, }, }, } def test_empty(self): self.dev1 =", "total (1989595136 bytes free) '''} def test_empty(self): self.dev1 = Mock(**self.empty_output)", "platform_obj = ShowPlatform(device=self.dev_asr1k) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_asr1k) class test_show_platform_power(test_show_platform_power_iosxe):", "\"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3\", DESCR:", "us by sending email to <EMAIL>. License Level: ipservices License", "\"other\": { \"PS 1 PWR-1400-AC\": { \"name\": \"PS 1 PWR-1400-AC\",", "'main': { 'chassis': { 'CISCO2821': { 'descr': '2821 chassis', 'name':", "'next_reload_license_level': 'ipbasek9', }, 'security': { 'license_level': 'securityk9', 'license_type': 'Permanent', 'next_reload_license_level':", "assistance please contact us by sending email to <EMAIL>. Cisco", "\"PS 2 PWR-2700-AC/4\": { \"other\": { \"PS 2 PWR-2700-AC/4\": {", "platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') def test_golden(self): self.maxDiff = None self.device", "'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S', 'vid': '1.0', }, }, }, } golden_output_iosv", "Device# PID SN ------------------------------------------------- *0 C3900-SPE150/K9 FOC16050QP6 Technology Package License", "Mock(**self.golden_output_ios_cat6k) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def", "by power-on System restarted at 05:06:40 GMT Tue Sep 10", "VID: V01 , SN: FOC98675W3E NAME: \"Virtual Private Network (VPN)", "Assembly Revision Number : F0 Version ID : V07 CLEI", "\"VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1\", DESCR:", "FXS181101V4 NAME: \"1\", DESCR: \"WS-SUP720-3BXL 2 ports Supervisor Engine 720", "{ '1': { 'EM-HDA-6FXO': { 'descr': 'Six port FXO voice", "'production image', 'last_reload_reason': 'power-on', 
'license_level': 'ipservices', 'license_type': 'Permanent', 'main_mem': '262144',", "with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') def test_golden(self):", "{'execute.return_value': '''\\ Directory of flash:/ '''} golden_parsed_output_iosv = { \"dir\":", "and use. Delivery of Cisco cryptographic products does not imply", "for Cisco 3900 ISR on Slot 0', 'pid': 'C3900-SPE150/K9', 'sn':", "Copyright (c) 1986-2013 by Cisco Systems, Inc. Compiled Wed 26-Jun-13", "test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwarePlim(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "Device(name='ios') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv =", "PID: X2-10GB-SR , VID: V06 , SN: ONT170201TT NAME: \"3\",", "{ 'C3900 AC Power Supply 1': { 'descr': 'C3900 AC", "= Disabled Communications = Down Reason: Failure Current Processor Information", "} }, \"3\": { \"lc\": { \"WS-X6824-SFP\": { \"name\": \"3\",", ", VID: V01, SN: SAL1214LAG5 NAME: \"WS-C6503-E-FAN 1\", DESCR: \"Enhanced", "V03, SN: APS17070093 '''} golden_parsed_output_3 = { \"main\": { \"chassis\":", "- FlexStackPlus Module', 'pid': 'C1010X-STACK', 'sn': 'FD232323XXZ', 'vid': 'V02 ',", "'descr': 'Wan Interface Card BRI U (2091, 3086)', 'name': 'Wan", "\"268\", \"size\": \"524288\", \"permissions\": \"-rw-\" }, \"boot\": { \"last_modified_date\": \"Jan", "V06 , SN: ONT1702033D NAME: \"2\", DESCR: \"WS-X6816-10GE CEF720 16", "self.assertRaises(SchemaMissingKeyError): parsed_output = dir_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv", "2027520K/69632K bytes of memory. 
Processor board ID FGL161010K8 2 FastEthernet", "test_golden_standby(self): self.device = Mock(**self.golden_output_standby) obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='standby',", "'sn': 'LTP13579L3R', 'vid': 'V01L ', }, }, '2/1/1': { 'SFP-10G-LR':", "0.87, \"process\": \"IOSv e1000\", \"five_min_cpu\": 2.77, \"runtime\": 3582279, \"pid\": 84,", "'image_id': 'C3750E-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type':", "dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value':", "{ \"last_switchover_reason\": \"unsupported\", \"maint_mode\": \"Disabled\", \"switchovers_system_experienced\": \"0\", \"available_system_uptime\": \"0 minutes\",", "\"Wan Interface Card BRI U (2091, 3086)\" PID: WIC-1B-U-V2 ,", "'next_reload_license_level': 'datak9', }, 'ipbase': { 'license_level': 'ipbasek9', 'license_type': 'Permanent', 'next_reload_license_level':", "{ 'lc': { 'CISCO3845-MB': { 'descr': 'c3845 Motherboard with Gigabit", "test_golden(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowModule(device=self.dev_c3850)", "self.dev1 = Mock(**self.semi_empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError): parsed_output =", "Te2/2\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/2\" PID: X2-10GB-SR , VID:", "power supply for CISCO7604 1\" PID: PWR-2700-AC/4 , VID: V03,", "1\", \"pid\": \"PWR-1400-AC\", \"vid\": \"V01\", \"sn\": \"ABC0830J127\", } } },", "Transceiver 10Gbase-SR Te1/4\" PID: X2-10GB-SR , VID: V06 , SN:", "status='active', slot='0', iotype='opm') self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def test_empty(self): self.device", ", SN: FXS170802GL NAME: \"CLK-7600 2\", DESCR: \"OSR-7600 Clock FRU", 
"\"WS-SUP720-3BXL\": { \"name\": \"1\", \"descr\": \"WS-SUP720-3BXL 2 ports Supervisor Engine", "Number : CMMFF00ARC Hardware Board Revision Number : 0x04 Switch", "golden_parsed_output_iosv = { \"dir\": { \"flash0:/\": { \"files\": { \"e1000_bia.txt\":", "\"Transceiver Te2/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"AGA1515XZE2\", \"vid\": \"V05 \", }", "', }, }, }, 'slot': { '0': { 'lc': {", "8FXS/DID\" PID: EVM-HD-8FXS/DID , VID: V04 , SN: FOC65798TG8 NAME:", "ShowPlatformSoftwareSlotActiveMonitorMem,\\ ShowPlatformHardware,\\ ShowPlatformHardwarePlim,\\ ShowPlatformHardwareQfpBqsOpmMapping,\\ ShowPlatformHardwareQfpBqsIpmMapping,\\ ShowPlatformHardwareSerdes,\\ ShowPlatformHardwareSerdesInternal,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\ ShowPlatformHardwareQfpStatisticsDrop,\\", "(C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright", "\"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V04\", \"sn\": \"SAL14017TWF\", \"subslot\": { \"0\": {", "Forwarding Card 4 EARL sub-module of 2\", DESCR: \"WS-F6K-DFC4-E Distributed", "= dir_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv)", "\"system_image\": \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\", \"chassis\": \"WS-C6503-E\", \"main_mem\": \"983008\", \"processor_type\": \"R7000\", 'sp_by': 'power", "'sn': '10293847', 'vid': 'V01', }, }, '4': { 'PVDM2-64': {", "\"WS-X6748-GE-TX\", \"vid\": \"V02\", \"sn\": \"SAL1128UPQ9\", \"subslot\": { \"0\": { \"WS-F6700-DFC3CXL\":", "reset SuperLAT software (copyright 1990 by Meridian Technology Corp). X.25", "'''} golden_output_4 = {'execute.return_value': ''' NAME: \"1\", DESCR: \"WS-C8888X-88\" PID:", "SN: FTX7908A3RQ NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk -", "DESCR: \"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 
1.2\" PID: WS-F6K-DFC4-E", "DESCR: \"SFP-10GBase-SR\" PID: SFP-10G-SR , VID: V03 , SN: SPC1519005V", "'15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M', 'os': 'IOS', 'image_type': 'production image', 'compiled_date': 'Mon", "SM-ES2-16-P , VID: , SN: FOC09876NP3 '''} golden_parsed_output_6 = {", "number : 800-32727-03 Daughterboard serial number : FDO202823P8 System serial", "\"c3845 Motherboard with Gigabit Ethernet\" PID: CISCO3845-MB , VID: V09", "\"boot\": { \"last_modified_date\": \"Jan 30 2013 00:00:00 +00:00\", \"index\": \"1\",", "'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port", "ShowProcessesCpu(device=self.device) parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def", ", SN: FD232323XXZ NAME: \"GigabitEthernet1/0/49\", DESCR: \"1000BaseSX SFP\" PID: GLC-SX-MMD", "PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675U0D NAME: \"VWIC2-2MFT-T1/E1", "Te2/16\" PID: X2-10GB-SR , VID: V06 , SN: ONT170201TT NAME:", "= cpu_platform_obj.parse() class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj", "', }, }, '1': { 'WIC-1B-U-V2': { 'descr': 'Wan Interface", "{ 'main': { 'chassis': { 'CISCO3945-CHASSIS': { 'descr': 'CISCO3945-CHASSIS', 'name':", "Clock FRU 1\" PID: CLK-7600 , VID: , SN: FXS181101V4", "'V05 ', }, }, }, 'C3900 AC Power Supply 1':", "test_golden_standby_offline(self): self.device = Mock(**self.golden_output_standby_offline) obj = ShowVersionRp(device=self.device) self.maxDiff = None", "\"2\", DESCR: \"WS-C3210X-48\" PID: WS-C3210X-48T-S , VID: V02 , SN:", "hours, 42 minutes\" } } golden_output_iosv = {'execute.return_value': '''\\ Cisco", "in current state = 1 day, 16 hours, 42 minutes", "V03, SN: APS1707008Y NAME: \"PS 2 PWR-2700-AC/4\", DESCR: \"2700W AC", "PWR-1400-AC\", \"descr\": \"AC power supply, 1400 watt 1\", \"pid\": 
\"PWR-1400-AC\",", "} }, }, } } }, \"3\": { \"lc\": {", "\"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2\", DESCR:", ", SN: FOC135464KO NAME: \"Gigabit(1000BaseT) module for EtherSwitch NM on", "SubSlot 1\", DESCR: \"Wan Interface Card BRI U (2091, 3086)\"", "Ethernet interfaces DRAM configuration is 72 bits wide with parity", "Power Supply 1', 'pid': 'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9', 'vid': 'V01D ',", "Command This product contains cryptographic features and is subject to", "= ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff", "'10080K', 'returned_to_rom_by': 'reload', \"main_mem\": \"435457\", \"mem_size\": { \"non-volatile configuration\": \"256\"", "PID: X2-10GB-SR , VID: V06 , SN: ONT170202T5 NAME: \"Transceiver", "\"name\": \"switching engine sub-module of 1\", \"pid\": \"WS-F6K-PFC3BXL\", \"sn\": \"SAL11434LYG\",", "PID: FAN-MOD-4HS , VID: V01, SN: DCH170900PF NAME: \"PS 1", "2: { \"invoked\": 1466728, \"usecs\": 2442, \"tty\": 0, \"one_min_cpu\": 0.87,", "DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1\" PID:", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_port) def test_golden_slot(self): self.device = Mock(**self.golden_output_slot)", "Chassis System\", \"pid\": \"WS-C6503-E\", \"vid\": \"V03\", \"sn\": \"FXS1821Q2H9\", } }", "{'execute.return_value': '''\\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8,", "'2', 'Gigabit Ethernet': '3', }, 'os': 'IOS', 'platform': 'C3900', 'processor_board_flash':", "PID: WS-C0123X-45T-S , VID: V00 , SN: FDO123R12W NAME: \"Switch", "\"config\": { \"last_modified_date\": \"Oct 14 2013 00:00:00 +00:00\", \"index\": \"264\",", "{ 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6' } } }, 'license_package': {", "WS-SUP720 , VID: , SN: SAL11434N9G NAME: \"switching engine sub-module", "dir_obj = 
Dir(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsered_output = dir_obj.parse() def test_semi_empty(self):", ", VID: V09 , SN: FOC729346GQ NAME: \"Virtual Private Network", "module for EtherSwitch NM\" PID: GE-DCARD-ESW , VID: V01 ,", "seconds: 4%/0%; one minute: 4%; five minutes: 9% PID Runtime(ms)", "ROM: Bootstrap program is IOSv '''} golden_parsed_output_iosv = { \"version\":", "Cisco Systems, Inc. Compiled Mon 22-Jan-18 04:07 by prod_rel_team ROM:", "\"pid\": 368, \"five_sec_cpu\": 1.03 }, 2: { \"invoked\": 1466728, \"usecs\":", "'1': { 'rp': { 'WS-C0123X-45T-S': { 'descr': 'WS-C8888X-88', 'name': '1',", "self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_serdes_internal) def test_empty(self): self.device1 =", "self.device = Mock(**self.golden_output) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse() self.maxDiff", "}, }, }, 'vid': 'V02 ', }, }, }, },", "Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0\" PID: IOSv , VID:", "'descr': 'Cisco Services Performance Engine 150 for Cisco 3900 ISR',", "status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_standby) def test_golden_standby_offline(self): self.device =", "platform_obj = ShowBootvar(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self):", ": ------------------------------ Available system uptime = 0 minutes Switchovers system", "{ \"version\": { \"os\": \"IOS\", \"version_short\": \"12.2\", \"platform\": \"s72033_rp\", \"version\":", "'rtr_type': 'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S', 'number_of_intfs': { 'Virtual Ethernet': '14', 'FastEthernet':", "number : B0 Model number : WS-C3750X-24P-L Daughterboard assembly number", "returned to ROM by reload at 10:26:47 EST Mon Dec", "WS-X6748-GE-TX , VID: V02, SN: SAL1128UPQ9 NAME: \"switching engine sub-module", "Mock(**self.empty_output) dir_obj = Dir(device=self.dev1) 
with self.assertRaises(SchemaEmptyParserError): parsered_output = dir_obj.parse() def", ": 0x04 Switch Ports Model SW Version SW Image ------", "DESCR: \"X2 Transceiver 10Gbase-SR Te1/4\" PID: X2-10GB-SR , VID: V06", "2\", DESCR: \"16 Port 10BaseT/100BaseTX EtherSwitch\" PID: NM-16ESW , VID:", "Element\" PID: AIM-VPN/SSL-2 , VID: V01, SN: FOC2837465E '''} golden_parsed_output_7", "WS-C6503-E-FAN , VID: V02, SN: DCH183500KW NAME: \"PS 1 PWR-1400-AC\",", "\"descr\": \"WS-F6700-CFC Centralized Forwarding Card Rev. 4.1\", \"name\": \"WS-F6700-CFC Centralized", "Last reload type: Normal Reload Last reload reason: Reload Command", "\"V01\", } } }, } } }, \"WS-C6503-E-FAN 1\": {", "self.device = Mock(**self.golden_output_active_ipm) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0')", "'3825 chassis', 'pid': 'CISCO3825', 'sn': 'FTX7908A3RQ', 'vid': 'V05 ', },", ": AC:F2:C5:FF:55:E7 Motherboard assembly number : 73-13061-04 Motherboard serial number", "= { 'main': { 'chassis': { 'CISCO3945-CHASSIS': { 'descr': 'CISCO3945-CHASSIS',", "DESCR: \"2821 chassis\" PID: CISCO2821 , VID: V07 , SN:", "ShowPlatformPower,\\ ShowProcessesCpuHistory,\\ ShowProcessesCpuPlatform,\\ ShowPlatformSoftwareStatusControl,\\ ShowPlatformSoftwareSlotActiveMonitorMem,\\ ShowPlatformHardware,\\ ShowPlatformHardwarePlim,\\ ShowPlatformHardwareQfpBqsOpmMapping,\\ ShowPlatformHardwareQfpBqsIpmMapping,\\ ShowPlatformHardwareSerdes,\\", "\"FAN-MOD-4HS 1\": { \"name\": \"FAN-MOD-4HS 1\", \"descr\": \"High Speed Fan", "0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F', 'vid': 'V01 ',", "sub-module of 1\", DESCR: \"VS-F6K-PFC4 Policy Feature Card 4 Rev.", "= ShowVersion(device=self.dev1) with self.assertRaises(AttributeError): parsered_output = version_obj.parse() def test_semi_empty(self): self.dev1", "PID: C3KX-PWR-007CBA , VID: V01L , SN: LTP13579L3R NAME: \"TenGigabitEthernet2/1/1\",", "switched to active is 10 weeks, 5 days, 5 
hours,", "Inc. Compiled Mon 22-Jan-18 04:07 by prod_rel_team ROM: Bootstrap program", "\"sn\": \"FXS170802GL\", } } }, \"CLK-7600 2\": { \"other\": {", "self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) dir_obj = Dir(device=self.dev_iosv) parsed_output", "parsed_output = { 'version': { 'chassis': 'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8', 'compiled_by':", "sub-module of 1\", DESCR: \"WS-SUP720 MSFC3 Daughterboard Rev. 3.1\" PID:", "DESCR: \"Stacking Module\" PID: C1010X-STACK , VID: V02 , SN:", "{ \"name\": \"3\", \"descr\": \"WS-X6824-SFP CEF720 24 port 1000mb SFP", "Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0\", DESCR:", "8FXS/DID on Slot 1', 'pid': 'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8', 'subslot': {", "} golden_output_9 = {'execute.return_value': ''' NAME: \"3845 chassis\", DESCR: \"3845", "\"Transceiver Te2/4\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/4\" PID: X2-10GB-SR ,", "ShowProcessesCpu,\\ ShowVersionRp,\\ ShowPlatform,\\ ShowPlatformPower,\\ ShowProcessesCpuHistory,\\ ShowProcessesCpuPlatform,\\ ShowPlatformSoftwareStatusControl,\\ ShowPlatformSoftwareSlotActiveMonitorMem,\\ ShowPlatformHardware,\\ ShowPlatformHardwarePlim,\\", "AC power supply for CISCO7604 1\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\",", "'9K66Z7TOKAACDEQA24N7S', 'vid': '1.0', }, }, }, } golden_output_iosv = {'execute.return_value':", "Meridian Technology Corp). X.25 software, Version 3.0.0. Bridging software. 
TN3270", "5 days, 5 hours, 16 minutes\", \"system_image\": \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\", \"chassis\": \"WS-C6503-E\",", "\"vid\": \"V06 \", } }, \"16\": { \"X2-10GB-SR\": { \"descr\":", "\"High Density Voice Module - 8FXS/DID\" PID: EVM-HD-8FXS/DID , VID:", "4 EARL sub-module of 2\", DESCR: \"WS-F6K-DFC4-E Distributed Forwarding Card", "= None self.assertEqual(parsed_output, self.golden_parsed_output_slot) def test_golden_subslot(self): self.device = Mock(**self.golden_output_subslot) obj", "\"Cisco Systems Cisco 6500 4-slot Chassis System\", \"pid\": \"WS-C6504-E\", \"vid\":", "self.golden_parsed_output_standby) def test_golden_standby_offline(self): self.device = Mock(**self.golden_output_standby_offline) obj = ShowVersionRp(device=self.device) self.maxDiff", "Active Location = slot 0 Current Software state = ACTIVE", "SAL17142D06 NAME: \"VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of", "golden_output_iosv = {'execute.return_value': '''\\ NAME: \"IOSv\", DESCR: \"IOSv chassis, Hw", "= obj.parse(slot='0') class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj", "1\" PID: CLK-7600 , VID: , SN: FXS170802GL NAME: \"CLK-7600", "17 2018 18:57:10 +00:00 nvram 269 -rw- 119 Oct 17", "\"Oct 17 2018 18:57:18 +00:00\", \"index\": \"269\", \"size\": \"119\", \"permissions\":", "System image file is \"flash:c3750e-universalk9-mz.152-2.E8.bin\" Last reload reason: Reload command", "Power Supply 1\" PID: PWR-3900-AC , VID: V03 , SN:", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None", "'name': '1', 'pid': 'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3', 'subslot': { '1': {", "FRU 2\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", } }", "}, } golden_output_5 = {'execute.return_value': ''' best-c3945-IOS3#show inventory NAME: 
\"CISCO3945-CHASSIS\",", "Compiled Mon 22-Jan-18 04:07 by prod_rel_team ROM: Bootstrap program is", "}, '1': { 'WIC-1B-U-V2': { 'descr': 'Wan Interface Card BRI", "test_golden_active_opm(self): self.maxDiff = None self.device = Mock(**self.golden_output_active_opm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll(", "self.assertRaises(SchemaEmptyParserError): parsed_output = cpu_platform_obj.parse() class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def test_empty(self): self.dev =", "'c3845 Motherboard with Gigabit Ethernet', 'name': 'c3845 Motherboard with Gigabit", "obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3) def test_golden_output_4(self):", "Tray 1\", \"pid\": \"WS-C6503-E-FAN\", \"vid\": \"V02\", \"sn\": \"DCH183500KW\", } }", ", SN: FOC65428K9F NAME: \"Wan Interface Card BRI U (2091,", "BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE SOFTWARE (fc1)", "= obj.parse() class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def test_golden_port(self): self.device = Mock(**self.golden_output_port) obj", "{ \"other\": { \"CLK-7600 1\": { \"name\": \"CLK-7600 1\", \"descr\":", "= ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def", "Policy Feature Card 3 Rev. 
1.8\", \"name\": \"switching engine sub-module", "\"SAL171848KL\", \"vid\": \"V04\", } } }, } } }, \"4\":", "'Wed 29-Mar-17 14:05', \"processor_type\": \"revision 1.0\", \"platform\": \"IOSv\", \"image_type\": \"production", "Engine 150 for Cisco 3900 ISR on Slot 0', 'pid':", "to show bootvar '''} def test_empty(self): self.dev = Mock(**self.empty_output) platform_obj", "10Gbase-SR Te1/4\", \"name\": \"Transceiver Te1/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T1\", \"vid\":", "'Two-Port Fast Ethernet High Speed WAN Interface Card on Slot", "PID: WS-SUP720-3BXL , VID: V05, SN: SAL11434P2C NAME: \"msfc sub-module", "or use encryption. Importers, exporters, distributors and users are responsible", "SubSlot 1\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk -", "Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)\", \"hostname\": \"cat6k_tb1\", \"uptime\":", "Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { \"active\": {", "test_show_switch_iosxe,\\ TestShowSwitchDetail as test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase): dev1 = Device(name='empty') dev_iosv", "Element', 'name': 'Virtual Private Network (VPN) Module on Slot 0',", "register is 0xF '''} golden_parsed_output_ios_1 = { 'version': {'version_short': '15.2',", "\"Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE", "Version 15.6(3)M2, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c)", "Power Supply 1', 'pid': 'PWR-3900-AC', 'sn': 'QCS1604P0BT', 'vid': 'V03 ',", "ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5\" PID:", "reset from s/w reset SuperLAT software (copyright 1990 by Meridian", "Engine 2T 10GE w/ CTS Rev. 
1.5\", \"pid\": \"VS-SUP2T-10G\", \"vid\":", "Te2/3\" PID: X2-10GB-SR , VID: V06 , SN: ONT170202UU NAME:", "= {'execute.return_value': '''\\ show processes cpu sorted 5min | inc", "daughtercard', 'name': 'Six port FXO voice interface daughtercard on Slot", "\"sn\": \"DCH183500KW\", } } }, \"PS 1 PWR-1400-AC\": { \"other\":", "class TestShowVersion(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output =", "PID: PWR-3900-AC , VID: V03 , SN: QCS1604P0BT '''} golden_parsed_output_5", "def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowPlatform(device=self.dev1) with self.assertRaises(SchemaEmptyParserError):", "2019', 'returned_to_rom_by': 'reload', 'rom': 'System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE", "you require further assistance please contact us by sending email", "{ \"last_modified_date\": \"Mar 29 2017 00:00:00 +00:00\", \"index\": \"267\", \"size\":", "\"SM-ES2-16-P\" PID: SM-ES2-16-P , VID: , SN: FOC09876NP3 '''} golden_parsed_output_6", "{ \"WS-X6748-GE-TX\": { \"name\": \"2\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port", "is in 'DISABLED' state '''} def test_empty(self): self.dev1 = Mock(**self.empty_output)", "test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowModule(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "\"Encryption AIM Element\" PID: AIM-VPN/SSL-3 , VID: V01, SN: FOC758693YO", "'sn': 'FOC65798TG8', 'subslot': { '1': { 'EM-HDA-6FXO': { 'descr': 'Six", "\"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te1/5\", \"name\": \"Transceiver Te1/5\",", "version_obj.parse() self.assertEqual(parsed_output, self.parsed_output) class test_dir(unittest.TestCase): dev1 = Device(name='empty') dev_iosv =", "Policy Feature Card 3 Rev. 1.8\" PID: WS-F6K-PFC3BXL , VID:", "2\", DESCR: \"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 
1.1\" PID:", "'subslot': { '1': { 'EM-HDA-6FXO': { 'descr': 'Six port FXO", "'name': 'Switch 1 - Power Supply 1', 'pid': 'PWR-C2-2929WAC', 'sn':", "device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='opm') self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) class", "CEF720 16 port 10GE Rev. 2.0\", \"pid\": \"WS-X6816-10GE\", \"vid\": \"V02\",", "1 PWR-2700-AC/4\", DESCR: \"2700W AC power supply for CISCO7604 1\"", "0.54% 0.48% 0 PIM Process 84 3582279 1466728 2442 0.55%", "board ID FXS1821Q2H9 SR71000 CPU at 600Mhz, Implementation 0x504, Rev", "'LIT03728KKK', 'vid': 'V02L ', }, }, '1/0/49': { 'GLC-SX-MMD': {", "uptime is 9 weeks, 4 days, 2 hours, 3 minutes", "Rev. 3.1\", \"name\": \"msfc sub-module of 1\", \"pid\": \"WS-SUP720\", \"sn\":", "'mem_size': { 'non-volatile configuration': '255', }, 'number_of_intfs': { 'FastEthernet': '2',", ": FDO202823P8 System serial number : FDO2028F1WK Top Assembly Part", "1\", DESCR: \"Enhanced 3-slot Fan Tray 1\" PID: WS-C6503-E-FAN ,", "} }, \"CLK-7600 2\": { \"other\": { \"CLK-7600 2\": {", "slot='0', iotype='opm') self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def test_empty(self): self.device =", "VID: V03, SN: SAL17163901 NAME: \"Transceiver Te1/4\", DESCR: \"X2 Transceiver", "Sat Aug 28 2010 (SP by power on) System image", "empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\\ ROM: Bootstrap", "FXS181101V4 NAME: \"CLK-7600 2\", DESCR: \"OSR-7600 Clock FRU 2\" PID:", "'license_level': 'None', 'license_type': 'None', 'next_reload_license_level': 'None', }, }, 'main_mem': '2027520',", "}, 'vid': 'V05 ', }, }, }, '2': { 'rp':", "FOC09876NP3 '''} golden_parsed_output_6 = { 'slot': { '1': { 'lc':", "ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(subslot='0/1') self.maxDiff = None 
self.assertEqual(parsed_output, self.golden_parsed_output_subslot) def", "self.device = Mock(**self.golden_output_active_ipm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse(", "\"descr\": \"WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8\", \"name\": \"switching", "42 minutes System returned to ROM by reload System image", "}, }, }, 'vid': 'V05 ', }, }, }, '2':", "{ 'descr': 'Cisco Services Performance Engine 150 for Cisco 3900", "\"2700W AC power supply for CISCO7604 1\", \"pid\": \"PWR-2700-AC/4\", \"vid\":", "\"descr\": \"X2 Transceiver 10Gbase-SR Te1/5\", \"name\": \"Transceiver Te1/5\", \"pid\": \"X2-10GB-SR\",", "Distributed Forwarding Card 4 EARL sub-module of 2\", \"pid\": \"WS-F6K-DFC4-E\",", "'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57 EST Mon Dec 9 2019',", "\"2142715904\", \"bytes_free\": \"1989595136\" }, \"dir\": \"flash0:/\" } } golden_output_iosv =", "Interface Card', 'name': 'Two-Port Fast Ethernet High Speed WAN Interface", "supply, 1400 watt 1\", \"pid\": \"PWR-1400-AC\", \"vid\": \"V01\", \"sn\": \"ABC0830J127\",", "- 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0", "Rev. 4.1\", \"name\": \"WS-F6700-CFC Centralized Forwarding Card EARL sub-module of", "def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) dir_obj =", "'pid': 'CISCO3825', 'sn': 'FTX7908A3RQ', 'vid': 'V05 ', }, }, },", "', }, }, }, '16': { 'lc': { 'NM-16ESW': {", "boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE", "} } }, \"slot\": { \"CLK-7600 1\": { \"other\": {", "laws and regulations. If you are unable to comply with", "and local laws, return this product immediately. 
A summary of", "Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F', 'vid': 'V01", "'255', }, 'number_of_intfs': { 'FastEthernet': '2', 'Gigabit Ethernet': '3', },", "Aug 28 2010', 'returned_to_rom_by': 'power cycle', \"rtr_type\": \"WS-C6503-E\", \"chassis_sn\": \"FXS1821Q2H9\",", "\"OSPF-1 Hello\", \"five_min_cpu\": 0.07, \"runtime\": 113457, \"pid\": 412, \"five_sec_cpu\": 0.15", "\"3\": { \"lc\": { \"WS-X6824-SFP\": { \"name\": \"3\", \"descr\": \"WS-X6824-SFP", "\"V01\", }, } }, } } }, \"2\": { \"lc\":", "(C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1) sample_switch uptime is 8", "\"PVDMII DSP SIMM with four DSPs\" PID: PVDM2-64 , VID:", "''} golden_parsed_output = { \"five_sec_cpu_total\": 13, \"five_min_cpu\": 15, \"one_min_cpu\": 23,", "'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21 PDT Mon Sep 10 2018', 'uptime': '9", "'version': '15.0(1)M7', 'version_short': '15.0', }, } def test_empty(self): self.dev1 =", "\"index\": \"267\", \"size\": \"147988420\", \"permissions\": \"-rw-\" } }, \"bytes_total\": \"2142715904\",", "Centralized Forwarding Card Rev. 
4.1\" PID: WS-F6700-CFC , VID: V06,", "Importers, exporters, distributors and users are responsible for compliance with", "image file is \"flash:c3750e-universalk9-mz.152-2.E8.bin\" Last reload reason: Reload command This", "'uc': { 'license_level': 'None', 'license_type': 'None', 'next_reload_license_level': 'None', }, },", "test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdesInternal(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "}, \"4\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/4\",", "None self.device = Mock(**self.golden_output_9) obj = ShowInventory(device=self.device) parsed_output = obj.parse()", "}, \"1\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/1\",", "reload reason: Reload Command This product contains cryptographic features and", "ROM: Bootstrap program is C3750E boot loader BOOTLDR: C3750E Boot", "Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport", "'vid': 'V02 ', }, }, }, 'vid': 'V05 ', },", "'3845 chassis', 'name': '3845 chassis', 'pid': 'CISCO3845', 'sn': 'FTX6666ARJ9', 'vid':", "'descr': 'LLL Power Supply', 'name': 'Switch 1 - Power Supply", "NAME: \"C3900 AC Power Supply 1\", DESCR: \"C3900 AC Power", "Card 3 Rev. 
1.8\" PID: WS-F6K-PFC3BXL , VID: V01, SN:", "\"files\": { \"e1000_bia.txt\": { \"last_modified_date\": \"Oct 17 2018 18:57:18 +00:00\",", "four DSPs on Slot 0 SubSlot 4\", DESCR: \"PVDMII DSP", "4-slot Chassis System\", \"pid\": \"WS-C6504-E\", \"vid\": \"V01\", \"sn\": \"FXS1712Q1R8\", }", "\"pid\": \"WS-F6K-PFC3BXL\", \"sn\": \"SAL11434LYG\", \"vid\": \"V01\", }, } }, }", "'image_type': 'production image', 'compiled_date': 'Mon 22-Jan-18 04:07', 'compiled_by': 'prod_rel_team', 'rom':", ", SN: ONT170201TT NAME: \"3\", DESCR: \"WS-X6824-SFP CEF720 24 port", "802.3': '1' }, \"mem_size\": {\"non-volatile configuration\": \"1917\", \"packet buffer\": \"8192\"},", "\"flash:c3750e-universalk9-mz.152-2.E8.bin\" Last reload reason: Reload command This product contains cryptographic", "29 2017 00:00:00 +00:00\", \"index\": \"267\", \"size\": \"147988420\", \"permissions\": \"-rw-\"", ": W0 Motherboard revision number : B0 Model number :", "1\", \"descr\": \"OSR-7600 Clock FRU 1\", \"pid\": \"CLK-7600\", \"vid\": \"\",", "\"next_reload_boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\" } golden_output_iosv = {'execute.return_value': '''\\ BOOT variable =", "1\", DESCR: \"VS-F6K-MSFC5 CPU Daughterboard Rev. 
2.0\" PID: VS-F6K-MSFC5 ,", "sub-module of 1\", DESCR: \"WS-F6K-PFC3BXL Policy Feature Card 3 Rev.", "CISCO7604 1\", \"pid\": \"FAN-MOD-4HS\", \"vid\": \"V01\", \"sn\": \"DCH170900PF\", } }", "platform_obj = ShowBootvar(device=self.dev_iosv) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase):", "at 10:27:57 EST Mon Dec 9 2019 System image file", "parsed_output, self.golden_parsed_output_serdes_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdesInternal(device=self.device1)", "CompactFlash 0 (Read/Write) 0K bytes of ATA CompactFlash 1 (Read/Write)", "'''} golden_parsed_output_5 = { 'main': { 'chassis': { 'CISCO3945-CHASSIS': {", "returned to ROM by power-on System restarted at 12:22:21 PDT", "golden_output_7 = {'execute.return_value': ''' NAME: \"2821 chassis\", DESCR: \"2821 chassis\"", "'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'next_reload_license_level': 'ipservices', 'chassis': 'WS-C3750X-24S',", "CompactFlash 1 (Read/Write) 0K bytes of ATA CompactFlash 2 (Read/Write)", "\"X2-10GB-SR\", \"sn\": \"ONT1702033D\", \"vid\": \"V06 \", } }, }, }", "DESCR: \"PVDMII DSP SIMM with four DSPs\" PID: PVDM2-64 ,", "\"WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 
1.8\", \"name\": \"switching engine", "self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev = Device(name='c3850') empty_output = {'execute.return_value': ''}", "parsed_output = version_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv =", "{ 'SM-ES2-16-P': { 'descr': 'SM-ES2-16-P', 'name': '1', 'pid': 'SM-ES2-16-P', 'sn':", "- FlexStackPlus Module\", DESCR: \"Stacking Module\" PID: C1010X-STACK , VID:", "ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3) def test_golden_output_4(self): self.maxDiff =", "{ \"WS-C6503-E\": { \"name\": \"WS-C6503-E\", \"descr\": \"Cisco Systems Catalyst 6500", "\"descr\": \"2700W AC power supply for CISCO7604 2\", \"pid\": \"PWR-2700-AC/4\",", "minutes\", \"system_image\": \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\", \"chassis\": \"WS-C6503-E\", \"main_mem\": \"983008\", \"processor_type\": \"R7000\", 'sp_by':", "\"sn\": \"DCH170900PF\", } } }, \"PS 1 PWR-2700-AC/4\": { \"other\":", "distributors and users are responsible for compliance with U.S. 
and", "} } }, \"1\": { \"rp\": { \"VS-SUP2T-10G\": { \"name\":", "golden_parsed_output_ios_cat6k = { \"version\": { \"os\": \"IOS\", \"version_short\": \"12.2\", \"platform\":", "SAL11434P2C NAME: \"msfc sub-module of 1\", DESCR: \"WS-SUP720 MSFC3 Daughterboard", "\"uptime\": \"1 day, 16 hours, 42 minutes\" } } golden_output_iosv", "Mock(**self.empty_output) obj = ShowVersionRp(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class", "VID: V01, SN: FXS1712Q1R8 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600 Clock", "self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') def test_golden(self): self.maxDiff", "Package License Information for Module:'c3900' ----------------------------------------------------------------- Technology Technology-package Technology-package Current", "} } golden_output_ios = {'execute.return_value': '''\\ Cisco IOS Software, C3750E", "self.dev_iosv = Mock(**self.golden_output_iosv) dir_obj = Dir(device=self.dev_iosv) parsed_output = dir_obj.parse() self.assertEqual(parsed_output,", "for CISCO7604 1\" PID: FAN-MOD-4HS , VID: V01, SN: DCH170900PF", "{ \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te1/4\", \"name\": \"Transceiver", "Te1/5\", \"name\": \"Transceiver Te1/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702033D\", \"vid\": \"V06", "\"1917\", \"packet buffer\": \"8192\"}, \"curr_config_register\": \"0x2102\", } } golden_output_ios_cat6k =", "= Mock(**self.golden_output_slot_internal) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0', internal=True) self.maxDiff", "VID: V04, SN: SAL14017TWF NAME: \"WS-F6700-CFC Centralized Forwarding Card EARL", "state = ACTIVE Uptime in current state = 1 day,", "platform_obj.parse(status='active') def test_golden_active(self): self.maxDiff = None self.device = Mock(**self.golden_output_active) platform_obj", "self.golden_parsed_output) 
class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj =", "class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev)", "= None self.device = Mock(**self.golden_output_active) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) parsed_output", "{ \"WS-C6503-E-FAN 1\": { \"name\": \"WS-C6503-E-FAN 1\", \"descr\": \"Enhanced 3-slot", "'FOC98675U0D', 'vid': 'V01 ', }, }, '1': { 'VWIC2-2MFT-T1/E1': {", "agree to comply with applicable laws and regulations. If you", "power supply, 1400 watt 1\", \"pid\": \"PWR-1400-AC\", \"vid\": \"V01\", \"sn\":", "\"SAL14017TWF\", \"subslot\": { \"0\": { \"WS-F6700-CFC\": { \"descr\": \"WS-F6700-CFC Centralized", "'image_id': 'C3750E-UNIVERSALK9-M', 'os': 'IOS', 'image_type': 'production image', 'compiled_date': 'Mon 22-Jan-18", "- T1/E1 on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn':", "parsed_output = dir_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv =", "Speed Fan Module for CISCO7604 1\", \"pid\": \"FAN-MOD-4HS\", \"vid\": \"V01\",", "None self.device = Mock(**self.golden_output_6) obj = ShowInventory(device=self.device) parsed_output = obj.parse()", "minutes: 15% '''} golden_parsed_output_1 = { \"sort\": { 1: {", "ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_ios(self): self.maxDiff =", "(1989595136 bytes free) '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) dir_obj", "\"X2 Transceiver 10Gbase-SR Te1/5\" PID: X2-10GB-SR , VID: V06 ,", "'''} golden_parsed_output_iosv = { \"version\": { \"last_reload_reason\": \"Unknown reason\", 
\"hostname\":", "'V02 ', }, }, }, 'vid': 'V05 ', }, },", "'data': { 'license_level': 'datak9', 'license_type': 'Permanent', 'next_reload_license_level': 'datak9', }, 'ipbase':", "SN: FOC16062824 NAME: \"C3900 AC Power Supply 1\", DESCR: \"C3900", "1 SubSlot 1\", DESCR: \"Six port FXO voice interface daughtercard\"", "= ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios) def test_golden_ios_cat6k(self): self.maxDiff", "{ 'IOSv': { 'descr': 'IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw", "'GLC-SX-MMD': { 'descr': '1000BaseSX SFP', 'name': 'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD', 'sn':", "48 port 10/100/1000mb Ethernet Rev. 3.4\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V04\",", "}, }, 'vid': 'V02 ', }, }, }, }, }", "\"WS-C6503-E\", \"vid\": \"V03\", \"sn\": \"FXS1821Q2H9\", } } }, \"slot\": {", "SN: QCS1604P0BT '''} golden_parsed_output_5 = { 'main': { 'chassis': {", ", VID: V01, SN: DCH170900PF NAME: \"PS 1 PWR-2700-AC/4\", DESCR:", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2) def test_golden_output_3(self): self.maxDiff = None self.device =", "self.golden_parsed_output_ios_1) def test_golden_ios_2(self): self.maxDiff = None self.dev_iosv = Mock(**self.device_output) version_obj", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5) def test_golden_output_6(self): self.maxDiff = None self.device", "}, } }, } } }, \"2\": { \"lc\": {", "0 OSPF-1 Hello '''} def test_empty(self): self.dev = Mock(**self.empty_output) obj", "chassis\" PID: CISCO2821 , VID: V07 , SN: FTX1234AMWT NAME:", "ShowBootvar(device=self.dev_iosv) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev =", "3 minutes', 'version': '12.2(55)SE8', 'version_short': '12.2' } } golden_output_ios =", "= ShowPlatformSoftwareStatusControl(device=self.dev) 
with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff", "{'execute.return_value': '''\\ NAME: \"IOSv\", DESCR: \"IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S,", "self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv')", "= ShowProcessesCpuHistory(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2) def test_golden_output_3(self): self.maxDiff = None self.device", "\"435457\", \"mem_size\": { \"non-volatile configuration\": \"256\" }, \"system_image\": \"flash0:/vios-adventerprisek9-m\", \"curr_config_register\":", ": B0 Version ID : V03 CLEI Code Number :", "s/w reset SuperLAT software (copyright 1990 by Meridian Technology Corp).", "Card 4 EARL sub-module of 1\", \"pid\": \"VS-F6K-PFC4\", \"sn\": \"SAL17163901\",", "Revision Number : 0x04 Switch Ports Model SW Version SW", "Rev. 
1.8\", \"name\": \"switching engine sub-module of 1\", \"pid\": \"WS-F6K-PFC3BXL\",", "uptime is 1 hour, 20 minutes System returned to ROM", "V01, SN: SAL11434LYG NAME: \"2\", DESCR: \"WS-X6748-GE-TX CEF720 48 port", "ShowVersionRp(device=self.device) self.maxDiff = None with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(rp='standby', status='running')", "self.assertEqual(parsed_output, self.golden_parsed_output_6) def test_golden_output_7(self): self.maxDiff = None self.device = Mock(**self.golden_output_7)", "\"one_min_cpu\": 23, \"five_sec_cpu_interrupts\": 0 } golden_output = {'execute.return_value': '''\\ show", "NAME: \"1\", DESCR: \"VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE", "ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff =", "= ShowEnvironment(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff", "'compiled_by': 'prod_rel_team', 'rom': 'Bootstrap program is C3750E boot loader', 'bootldr':", "Processor board ID FXS1821Q2H9 SR71000 CPU at 600Mhz, Implementation 0x504,", "\"sn\": \"SAL13516QS8\", \"vid\": \"V06\", } } }, } } },", "Configuration register is 0x2102 '''} golden_output_ios_1 = {'execute.return_value': '''\\ Cisco", ", SN: ACW102938VS '''} golden_parsed_output_4 = { 'slot': { '1':", "\"V03\", \"sn\": \"APS1707008Y\", } } }, \"PS 2 PWR-2700-AC/4\": {", "golden_output_6 = {'execute.return_value': ''' NAME: \"1\", DESCR: \"SM-ES2-16-P\" PID: SM-ES2-16-P", "Forwarding Card Rev. 
4.1\", \"name\": \"WS-F6700-CFC Centralized Forwarding Card EARL", "SN: SAL1128UPQ9 NAME: \"switching engine sub-module of 2\", DESCR: \"WS-F6700-DFC3CXL", "\"sn\": \"SAL11434N9G\", \"vid\": \"\", }, \"WS-F6K-PFC3BXL\": { \"descr\": \"WS-F6K-PFC3BXL Policy", "parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def", "}, }, }, 'vid': 'V04 ', }, }, }, },", "CEF720 48 port 10/100/1000mb Ethernet Rev. 2.6\", \"pid\": \"WS-X6748-GE-TX\", \"vid\":", "= {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\\ Directory of flash:/", "268 -rw- 524288 Oct 17 2018 18:57:10 +00:00 nvram 269", "= ShowModule(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff", "with self.assertRaises(SchemaMissingKeyError): parsed_output = dir_obj.parse() def test_golden_iosv(self): self.maxDiff = None", "Address : AC:F2:C5:FF:55:E7 Motherboard assembly number : 73-13061-04 Motherboard serial", "parsed_output = obj.parse(slot='0', internal=True) self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_slot_internal)", "self.golden_parsed_output_active_opm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with", "SubSlot 1', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E', 'vid': 'V01 ', },", "Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1) BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M),", "is \"flash:c3750e-universalk9-mz.152-2.E8.bin\" Last reload reason: Reload command This product contains", "self.maxDiff = None self.dev_asr1k = Mock(**self.golden_output_asr1k) platform_obj = ShowPlatform(device=self.dev_asr1k) parsed_output", "1\", DESCR: \"WS-SUP720 MSFC3 Daughterboard Rev. 
3.1\" PID: WS-SUP720 ,", "from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\\ TestShowPlatformPower as test_show_platform_power_iosxe,\\ TestShowVersionRp", "Forwarding Card 4 Rev. 1.0\" PID: WS-F6K-DFC4-A , VID: V04,", "Cisco IOS Software, C3900 Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE SOFTWARE", "test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) version_obj = ShowVersion(device=self.dev_iosv)", "Slot 0\", DESCR: \"c3845 Motherboard with Gigabit Ethernet\" PID: CISCO3845-MB", "Te1/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702033D\", \"vid\": \"V06 \", } },", "sub-module of 4\", DESCR: \"WS-F6700-CFC Centralized Forwarding Card Rev. 4.1\"", "obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='standby', status='running') self.maxDiff = None", "= Mock(**self.empty_output) platform_obj = ShowPlatformPower(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse()", "= ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware(test_show_platform_hardware_iosxe): def", "class test_show_switch_detail(test_show_switch_detail_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitchDetail(device=self.dev1)", "bytes of ATA CompactFlash 2 (Read/Write) 10080K bytes of ATA", "Supply 1\", DESCR: \"ABC Power Supply\" PID: C3KX-PWR-350WAC , VID:", "Mar 29 2017 00:00:00 +00:00 vios-adventerprisek9-m 268 -rw- 524288 Oct", "'AIM-VPN/SSL-2': { 'descr': 'Encryption AIM Element', 'name': 'Virtual Private Network", "CPU Daughterboard Rev. 
2.0\" PID: VS-F6K-MSFC5 , VID: , SN:", "hour, 20 minutes System returned to ROM by reload at", "'version': '12.2(55)SE8', 'version_short': '12.2' } } golden_output_ios = {'execute.return_value': '''\\", "\"sn\": \"FXS1821Q2H9\", } } }, \"slot\": { \"CLK-7600 1\": {", "\"pid\": \"WS-X6816-10GE\", \"vid\": \"V02\", \"sn\": \"SAL17152QB3\", \"subslot\": { \"0\": {", "C3750E Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE SOFTWARE (fc2) Technical Support:", "'last_reload_type': 'Normal Reload', 'license_udi': { 'device_num': { '*0': { 'pid':", "DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-3 , VID: V01, SN:", "is 1 hour, 20 minutes System returned to ROM by", "\"0\" }, \"slot\": { \"slot 0\": { \"image_ver\": \"Cisco IOS", "Mock(**self.empty_output) platform_obj = ShowPlatformPower(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def", "{'execute.return_value': '''\\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8,", "PWR-1400-AC\": { \"name\": \"PS 1 PWR-1400-AC\", \"descr\": \"AC power supply,", "= ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self): self.maxDiff", "test_dir(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value':", "Dir(device=self.dev_iosv) parsed_output = dir_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase): dev1 =", "}, \"mem_size\": {\"non-volatile configuration\": \"1917\", \"packet buffer\": \"8192\"}, \"curr_config_register\": \"0x2102\",", "= Mock(**self.semi_empty_output) platform_obj = ShowPlatform(device=self.dev2) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse()", "1', 'pid': 'WIC-1B-U-V2', 'sn': '10293847', 'vid': 'V01', }, }, '4':", "Feature Card 3 
Rev. 1.8\" PID: WS-F6K-PFC3BXL , VID: V01,", "self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output,", "loader', 'rtr_type': 'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21 PDT Mon Sep", "self.device = Mock(**self.golden_output_active_opm) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0')", "= { \"main\": { \"chassis\": { \"WS-C6503-E\": { \"name\": \"WS-C6503-E\",", "X2-10GB-SR , VID: V06 , SN: ONT170201TT NAME: \"3\", DESCR:", "self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output", "class test_show_bootvar(unittest.TestCase): dev = Device(name='ios') dev_iosv = Device(name='iosv') empty_output =", "{ \"descr\": \"X2 Transceiver 10Gbase-SR Te2/5\", \"name\": \"Transceiver Te2/5\", \"pid\":", "T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC65428K9F NAME:", "Model number : WS-C3750X-24S-E Daughterboard assembly number : 800-32727-03 Daughterboard", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_golden_standby(self): self.device = Mock(**self.golden_output_standby)", "sub-module of 1\", DESCR: \"VS-F6K-MSFC5 CPU Daughterboard Rev. 2.0\" PID:", "by prod_rel_team Image text-base: 0x00003000, data-base: 0x02800000 ROM: Bootstrap program", "Hello\" ], \"five_sec_cpu_interrupts\": 0 } golden_output_1 = {'execute.return_value': ''' CPU", "V06 , SN: ONT170202T5 NAME: \"Transceiver Te2/5\", DESCR: \"X2 Transceiver", "please contact us by sending email to <EMAIL>. 
License Level:", "DESCR: \"Cisco Systems Cisco 6500 4-slot Chassis System\" PID: WS-C6504-E", "Mon Dec 9 2019 System image file is \"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\" Last", "\"CLK-7600 1\": { \"name\": \"CLK-7600 1\", \"descr\": \"OSR-7600 Clock FRU", "= platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def", "Gigabit Ethernet': '2' }, 'mem_size': { 'flash-simulated non-volatile configuration': '512'", "6500 3-slot Chassis System\", \"pid\": \"WS-C6503-E\", \"vid\": \"V03\", \"sn\": \"FXS1821Q2H9\",", "EtherSwitch NM', 'name': 'Gigabit(1000BaseT) module for EtherSwitch NM on Slot", "} } device_output = {'execute.return_value':''' best-c3945-IOS3#show version Cisco IOS Software,", "\"Transceiver Te2/2\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/2\" PID: X2-10GB-SR ,", "\"WS-SUP720\", \"sn\": \"SAL11434N9G\", \"vid\": \"\", }, \"WS-F6K-PFC3BXL\": { \"descr\": \"WS-F6K-PFC3BXL", "{ 'descr': 'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR', 'sn': 'ONT182746GZ', 'vid':", "minutes Image Version = Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M),", "Card', 'name': 'Two-Port Fast Ethernet High Speed WAN Interface Card", "DESCR: \"X2 Transceiver 10Gbase-SR Te2/16\" PID: X2-10GB-SR , VID: V06", "of 2\", DESCR: \"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2\"", "by sending email to <EMAIL>. 
License Level: ipservices License Type:", "PID: WIC-1B-U-V2 , VID: V01, SN: 10293847 NAME: \"PVDMII DSP", "Switchovers system experienced = 0 Standby failures = 0 Last", "SN: ONT170202T5 NAME: \"Transceiver Te2/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/5\"", "Revision: 1.0', 'name': 'IOSv', 'pid': 'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S', 'vid': '1.0',", "}, 'vid': 'V04 ', }, }, }, }, } golden_output_9", "Mock(**self.golden_output_active_opm) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff =", "(2091, 3086)\" PID: WIC-1B-U-V2 , VID: V01, SN: 10293847 NAME:", "Rev. 1.5\" PID: VS-SUP2T-10G , VID: V05, SN: SAL17152N0F NAME:", "\"WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0\" PID: WS-X6824-SFP", "Supply 1', 'name': 'C3900 AC Power Supply 1', 'pid': 'PWR-3900-AC',", "Speed Fan Module for CISCO7604 1\" PID: FAN-MOD-4HS , VID:", "\"descr\": \"X2 Transceiver 10Gbase-SR Te2/5\", \"name\": \"Transceiver Te2/5\", \"pid\": \"X2-10GB-SR\",", "'''\\ Directory of flash0:/ 1 drw- 0 Jan 30 2013", "ShowInventory,\\ ShowBootvar, \\ ShowProcessesCpuSorted,\\ ShowProcessesCpu,\\ ShowVersionRp,\\ ShowPlatform,\\ ShowPlatformPower,\\ ShowProcessesCpuHistory,\\ ShowProcessesCpuPlatform,\\", "cisco WS-C3750X-24P (PowerPC405) processor (revision W0) with 262144K bytes of", "NAME: \"Transceiver Te1/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/5\" PID: X2-10GB-SR", "\"tty\": 0, \"one_min_cpu\": 0.54, \"process\": \"PIM Process\", \"five_min_cpu\": 0.48, \"runtime\":", "= ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0', internal=True) self.maxDiff = None self.assertEqual(", "1 Virtual Ethernet/IEEE 802.3 interface 50 Gigabit Ethernet/IEEE 802.3 interfaces", "0x04 Switch Ports Model SW Version SW Image ------ -----", "self.dev2 = Mock(**self.semi_empty_output) platform_obj = ShowPlatform(device=self.dev2) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "number : A0 
Motherboard revision number : A0 Model number", "SN: APS1707008Y NAME: \"PS 2 PWR-2700-AC/4\", DESCR: \"2700W AC power", "'rp': { 'C3900-SPE150/K9': { 'descr': 'Cisco Services Performance Engine 150", "12.2(17r)S4, RELEASE SOFTWARE (fc1)\", \"bootldr\": \"s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7,", "you are unable to comply with U.S. and local laws,", "Clock FRU 2\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", }", "'subslot': { '3': { 'HWIC-2FE': { 'descr': 'Two-Port Fast Ethernet", "obj.parse(rp='standby', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_standby) def test_golden_standby_offline(self): self.device", "'''} def test_empty(self): self.dev1 = Mock(**self.empty_output) redundancy_obj = ShowRedundancy(device=self.dev1) with", "parsed_output = obj.parse(subslot='0/1') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_subslot) def test_golden_slot_internal(self):", "TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\ TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\ TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\\ TestShowPlatformHardwareSerdesStatisticsInternal", ": WS-C3750X-24P-L Daughterboard assembly number : 800-32727-03 Daughterboard serial number", "version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.parsed_output) class test_dir(unittest.TestCase):", ": 0x05 Switch Ports Model SW Version SW Image ------", "\"TenGigabitEthernet1/1/1\", DESCR: \"SFP-10GBase-SR\" PID: SFP-10G-SR , VID: V03 , SN:", "'PVDM2-64', 'sn': 'FOC63358WSI', 'vid': 'V01 ', }, }, }, },", "by Cisco Systems, Inc. 
Compiled Fri 05-Aug-11 00:32 by prod_rel_team", "\"15.6\", \"number_of_intfs\": { \"Gigabit Ethernet\": \"6\" }, \"version\": \"15.6(3)M2\", \"rtr_type\":", "users are responsible for compliance with U.S. and local country", "\"1 day, 16 hours, 42 minutes\", \"config_register\": \"0x0\", \"curr_sw_state\": \"ACTIVE\"", "DESCR: \"3825 chassis\" PID: CISCO3825 , VID: V05 , SN:", "CMMFF00ARC Hardware Board Revision Number : 0x04 Switch Ports Model", "test_show_platform_power_iosxe,\\ TestShowVersionRp as test_show_version_rp_iosxe,\\ TestShowProcessesCpu as test_show_processes_cpu_iosxe,\\ TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\\", "------------------------------------------------------------------ ipbase ipbasek9 Permanent ipbasek9 security securityk9 Permanent securityk9 uc", "\"V01\", \"sn\": \"FXS1712Q1R8\", } } }, \"slot\": { \"CLK-7600 1\":", "\"OSR-7600 Clock FRU 1\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\",", "'LLL Power Supply', 'name': 'Switch 1 - Power Supply 1',", "test_show_version_rp(test_show_version_rp_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowVersionRp(device=self.device) parsed_output", "1\": { \"name\": \"WS-C6503-E-FAN 1\", \"descr\": \"Enhanced 3-slot Fan Tray", "}, 'vid': 'V01 ', }, }, }, }, } def", "def test_golden_output_4(self): self.maxDiff = None self.device = Mock(**self.golden_output_4) obj =", "Reload Command This product contains cryptographic features and is subject", "15, \"one_min_cpu\": 23, \"five_sec_cpu_interrupts\": 0 } golden_output = {'execute.return_value': '''\\", "V01, SN: SAL17152EG9 NAME: \"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL", "0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F', 'vid': 'V01 ', }, },", "= Mock(**self.golden_output_active_ipm) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff", "0x02800000 ROM: Bootstrap program 
is C3750E boot loader BOOTLDR: C3750E", "Virtual Ethernet interfaces 1 FastEthernet interface 28 Gigabit Ethernet interfaces", "= None self.assertEqual(parsed_output, self.golden_parsed_output_serdes) def test_empty(self): self.device1 = Mock(**self.empty_output) obj", "V01 , SN: FOC63358WSI NAME: \"High Density Voice Module -", "self.golden_parsed_output) class test_show_module(test_show_module_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj =", "\"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/1\", \"name\": \"Transceiver Te2/1\",", "SN: ONT170201TT NAME: \"3\", DESCR: \"WS-X6824-SFP CEF720 24 port 1000mb", "{ 'descr': 'BCA Power Supply', 'name': 'Switch 2 - Power", "self.golden_parsed_output) class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) cpu_platform_obj =", "Process\", \"five_min_cpu\": 0.48, \"runtime\": 362874, \"pid\": 368, \"five_sec_cpu\": 1.03 },", "\"name\": \"Transceiver Te1/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T1\", \"vid\": \"V06 \",", "with 2027520K/69632K bytes of memory. Processor board ID FGL161010K8 2", "self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) cpu_platform_obj", "'C3KX-PWR-350WAC': { 'descr': 'ABC Power Supply', 'name': 'Switch 1 -", "Mock(**self.golden_output_9) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9) class", "of 1\", \"pid\": \"VS-F6K-PFC4\", \"sn\": \"SAL17163901\", \"vid\": \"V03\", }, },", "(2091, 3086) on Slot 0 SubSlot 1', 'pid': 'WIC-1B-U-V2', 'sn':", "flash-simulated non-volatile configuration memory. 
Base ethernet MAC Address : 84:3D:C6:FF:F1:B8", "device_output = {'execute.return_value':''' best-c3945-IOS3#show version Cisco IOS Software, C3900 Software", "', }, }, }, 'C3900 AC Power Supply 1': {", "\"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\", \"configuration_register\": \"0x2012\" }, \"next_reload_boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\" } golden_output_iosv = {'execute.return_value':", "PWR-1400-AC\": { \"other\": { \"PS 1 PWR-1400-AC\": { \"name\": \"PS", "'pid': 'AIM-VPN/SSL-3', 'sn': 'FOC758693YO', 'vid': 'V01', }, }, }, '1':", "Clock FRU 1\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", }", "Card 4 EARL sub-module of 2\", DESCR: \"WS-F6K-DFC4-E Distributed Forwarding", "'flash-simulated non-volatile configuration': '512' }, 'curr_config_register': '0xF' } } device_output", "= BOOTLDR variable = Configuration register is 0x2012 Standby not", "{'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\\ ROM: Bootstrap program is", "\"0\": { \"WS-SUP720\": { \"descr\": \"WS-SUP720 MSFC3 Daughterboard Rev. 
3.1\",", "{ 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1',", "TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\\ TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\\ TestShowPlatformHardware", "configuration': '512' }, 'curr_config_register': '0xF' } } device_output = {'execute.return_value':'''", "Processor board ID FDO1633Q14S Last reset from power-on 14 Virtual", "'3845 chassis', 'pid': 'CISCO3845', 'sn': 'FTX6666ARJ9', 'vid': 'V05 ', },", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware(test_show_platform_hardware_iosxe): def test_golden_active(self): self.device", "Boot Loader (C3750X-HBOOT-M) Version ' '15.2(3r)E, RELEASE SOFTWARE (fc1)', 'chassis':", "SN: SAL17163901 NAME: \"Transceiver Te1/4\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/4\"", "= ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = inventory_obj.parse() def test_golden_iosv(self): self.maxDiff", "platform_obj.parse(status='active') self.assertEqual(parsed_output, self.golden_parsed_output_active) class test_show_env(test_show_env_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output)", "Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport", "}, }, }, } golden_output_iosv = {'execute.return_value': '''\\ NAME: \"IOSv\",", "EARL sub-module of 3\", \"pid\": \"WS-F6K-DFC4-A\", \"sn\": \"SAL171848KL\", \"vid\": \"V04\",", "Slot 0 SubSlot 3', 'pid': 'HWIC-2FE', 'sn': 'FOC16062824', 'vid': 'V02", "current state = 1 day, 16 hours, 42 minutes Image", "IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2)", "by power on) System image file is 
\"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\" This product", "Rev. 1.0\" PID: WS-X6824-SFP , VID: V01, SN: SAL17152EG9 NAME:", "'name': 'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS', 'sn': 'FGL161010K8', 'vid': 'V05 ', },", "{ 'chassis': 'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Fri 05-Aug-11", "'Virtual Ethernet': '14', 'FastEthernet': '1', 'Gigabit Ethernet': '28', 'Ten Gigabit", "} golden_output_iosv = {'execute.return_value': '''\\ NAME: \"IOSv\", DESCR: \"IOSv chassis,", "self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.dev = Mock(**self.golden_output_1) obj", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware(test_show_platform_hardware_iosxe): def test_golden_active(self): self.device =", "\"PS 1 PWR-2700-AC/4\": { \"other\": { \"PS 1 PWR-2700-AC/4\": {", "test_show_env_iosxe,\\ TestShowModule as test_show_module_iosxe,\\ TestShowSwitch as test_show_switch_iosxe,\\ TestShowSwitchDetail as test_show_switch_detail_iosxe", "1', 'pid': 'EM-HDA-6FXO', 'sn': 'FOC85389QXB', 'vid': 'V03 ', }, },", "self.dev = Mock(**self.golden_output) obj = ShowEnvironment(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output,", "test_show_version_rp_iosxe,\\ TestShowProcessesCpu as test_show_processes_cpu_iosxe,\\ TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\\ TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\\", "\"sn\": \"ONT170202T5\", \"vid\": \"V06 \", } }, \"5\": { \"X2-10GB-SR\":", "SAL17152EG9 NAME: \"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of", "'10:26:47 EST Mon Dec 9 2019', 'returned_to_rom_by': 'reload', 'rom': 'System", "ID : V07 CLEI Code Number : CMMPP00DRB Hardware Board", "import, export, transfer and use. 
Delivery of Cisco cryptographic products", "\"FXS170802GL\", } } }, \"FAN-MOD-4HS 1\": { \"other\": { \"FAN-MOD-4HS", "\"524288\", \"permissions\": \"-rw-\" }, \"boot\": { \"last_modified_date\": \"Jan 30 2013", "= None self.dev_iosv = Mock(**self.golden_output_iosv) redundancy_obj = ShowRedundancy(device=self.dev_iosv) parsed_output =", "{'execute.return_value': ''' NAME: \"3825 chassis\", DESCR: \"3825 chassis\" PID: CISCO3825", "self.dev_iosv = Mock(**self.device_output) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output,", "(VPN) Module DRAM configuration is 72 bits wide with parity", "self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitchDetail(device=self.dev_c3850) parsed_output", "engine sub-module of 2\", DESCR: \"WS-F6700-DFC3CXL Distributed Forwarding Card 3", "test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowProcessesCpuHistory(device=self.device) with", "A0 Motherboard revision number : A0 Model number : WS-C3750X-24S-E", "\"3825 chassis\", DESCR: \"3825 chassis\" PID: CISCO3825 , VID: V05", "Module - 8FXS/DID on Slot 1\", DESCR: \"High Density Voice", "= None self.dev_iosv = Mock(**self.device_output) version_obj = ShowVersion(device=self.dev_iosv) parsed_output =", "is subject to United States and local country laws governing", "Gigabit Ethernet interfaces The password-recovery mechanism is enabled. 
512K bytes", "'3': { 'HWIC-2FE': { 'descr': 'Two-Port Fast Ethernet High Speed", "TestShowPlatformHardware as test_show_platform_hardware_iosxe,\\ TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\\ TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\ TestShowPlatformHardwareQfpBqsIpmMapping", "'main_mem': '2027520', 'mem_size': { 'non-volatile configuration': '255', }, 'number_of_intfs': {", "4 Rev. 1.0\" PID: WS-F6K-DFC4-A , VID: V04, SN: SAL171848KL", "obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output,", "{ 'license_level': 'securityk9', 'license_type': 'Permanent', 'next_reload_license_level': 'securityk9', }, 'uc': {", "Jan 30 2013 00:00:00 +00:00 boot 264 drw- 0 Oct", "is 0x0'''} golden_parsed_output_ios = { 'version': {'bootldr': 'C3750E Boot Loader", "1986-2011 by Cisco Systems, Inc. Compiled Fri 05-Aug-11 00:32 by", "laws governing import, export, transfer and use. Delivery of Cisco", "1 PWR-2700-AC/4\": { \"name\": \"PS 1 PWR-2700-AC/4\", \"descr\": \"2700W AC", "obj = ShowProcessesCpu(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_version_rp(test_show_version_rp_iosxe):", "Card 4 Rev. 
1.0\", \"name\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4", "\"SFP-10GBase-SR\" PID: SFP-10G-SR , VID: V03 , SN: SPC1519005V NAME:", "test_show_bootvar(unittest.TestCase): dev = Device(name='ios') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value':", "golden_output = {'execute.return_value': '''\\ show processes cpu sorted 5min |", "2\" PID: CLK-7600 , VID: , SN: FXS181101V4 NAME: \"1\",", "\"main\": { \"chassis\": { \"WS-C6504-E\": { \"name\": \"WS-C6504-E\", \"descr\": \"Cisco", "self.golden_parsed_output_6) def test_golden_output_7(self): self.maxDiff = None self.device = Mock(**self.golden_output_7) obj", "Te1/4\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/4\" PID: X2-10GB-SR , VID:", "self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.dev = Mock(**self.golden_output_1)", "\"1\", DESCR: \"SM-ES2-16-P\" PID: SM-ES2-16-P , VID: , SN: FOC09876NP3", "inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_output_2(self): self.maxDiff = None self.device =", "Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { \"red_sys_info\": {", "= obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_empty(self):", "'0': { 'other': { 'AIM-VPN/SSL-2': { 'descr': 'Encryption AIM Element',", "Module\", DESCR: \"Stacking Module\" PID: C1010X-STACK , VID: V02 ,", "obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_empty(self): self.device1", "} golden_output_ios = {'execute.return_value': '''\\ Cisco IOS Software, C3750E Software", "4 EARL sub-module of 1\", \"pid\": \"VS-F6K-PFC4\", \"sn\": \"SAL17163901\", \"vid\":", "DESCR: \"OSR-7600 Clock FRU 2\" PID: CLK-7600 , VID: ,", "ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse(key_word='CPU', 
sort_time='5min') self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff", "Daughterboard serial number : FDO172217ED System serial number : FDO1633Q14S", "Software IOS (tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE", "Dec 9 2019 System image file is \"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\" Last reload", "'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1", "= Mock(**self.golden_output_6) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6)", "'V01', 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 -", "file is \"flash:c3750e-universalk9-mz\" This product contains cryptographic features and is", "def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) with", "self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_slot_internal) def test_empty(self): self.device1 =", "} golden_output_7 = {'execute.return_value': ''' NAME: \"2821 chassis\", DESCR: \"2821", "number : FDO202907UH Model revision number : W0 Motherboard revision", "\"SAL17142D06\", \"vid\": \"\", }, \"VS-F6K-PFC4\": { \"descr\": \"VS-F6K-PFC4 Policy Feature", "def test_empty(self): self.device1 = Mock(**self.empty_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1) with self.assertRaises(SchemaEmptyParserError):", "parsed_output = obj.parse(rp='standby', status='running') def test_empty(self): self.device1 = Mock(**self.empty_output) obj", "RELEASE SOFTWARE (fc1) best-c3945-IOS3 uptime is 1 hour, 20 minutes", "= None self.device = Mock(**self.golden_output_1) obj = ShowProcessesCpu(device=self.device) parsed_output =", "\"pid\": \"VS-F6K-MSFC5\", \"sn\": \"SAL17142D06\", \"vid\": \"\", }, \"VS-F6K-PFC4\": { \"descr\":", "class 
test_show_redundancy(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output =", "None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowModule(device=self.dev_c3850) parsed_output = platform_obj.parse()", "\"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS17070093\", } } }, \"1\": {", "ShowVersion(device=self.dev1) with self.assertRaises(AttributeError): parsered_output = version_obj.parse() def test_semi_empty(self): self.dev1 =", "\"active\": { \"boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\", \"configuration_register\": \"0x2012\" }, \"next_reload_boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\" }", "1\", DESCR: \"Wan Interface Card BRI U (2091, 3086)\" PID:", "\"ONT1702020H\", \"vid\": \"V06 \", } }, \"3\": { \"X2-10GB-SR\": {", "'15.0(1)M7', 'version_short': '15.0', }, } def test_empty(self): self.dev1 = Mock(**self.empty_output)", "Revision: 1.0\" PID: IOSv , VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S '''}", "CISCO7604 1\" PID: FAN-MOD-4HS , VID: V01, SN: DCH170900PF NAME:", ", VID: V01 , SN: ACW102938VS '''} golden_parsed_output_4 = {", "genie.metaparser.util.exceptions import SchemaEmptyParserError,\\ SchemaMissingKeyError from genie.libs.parser.ios.show_platform import ShowVersion,\\ Dir,\\ ShowRedundancy,\\", "test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) with self.assertRaises(SchemaEmptyParserError):", "22-Jan-18 04:07 by prod_rel_team ROM: Bootstrap program is C3750E boot", "SN: FOC2837465E '''} golden_parsed_output_7 = { 'main': { 'chassis': {", "Speed WAN Interface Card on Slot 0 SubSlot 3\", DESCR:", "\"descr\": \"OSR-7600 Clock FRU 1\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\":", "CISCO3825 , VID: V05 , SN: FTX7908A3RQ NAME: \"VWIC2-2MFT-T1/E1 -", "Rev. 
3.4\" PID: WS-X6748-GE-TX , VID: V04, SN: SAL14017TWF NAME:", "self.golden_parsed_output_2) def test_golden_output_3(self): self.maxDiff = None self.device = Mock(**self.golden_output_3) obj", "bytes of ATA System CompactFlash 0 (Read/Write) License Info: License", "'''} golden_parsed_output_8 = { 'main': { 'chassis': { 'CISCO3825': {", "self.assertEqual( parsed_output, self.golden_parsed_output_serdes_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj =", "days, 5 hours, 16 minutes\", \"system_image\": \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\", \"chassis\": \"WS-C6503-E\", \"main_mem\":", "-rw- 147988420 Mar 29 2017 00:00:00 +00:00 vios-adventerprisek9-m 268 -rw-", "SN: FXS170802GL NAME: \"CLK-7600 2\", DESCR: \"OSR-7600 Clock FRU 2\"", "obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='active', status='running') self.maxDiff = None", "SFP\" PID: GLC-SX-MMD , VID: V01 , SN: ACW102938VS '''}", "System CompactFlash 0 (Read/Write) 0K bytes of ATA CompactFlash 1", "'CISCO3825', 'sn': 'FTX7908A3RQ', 'vid': 'V05 ', }, }, }, 'slot':", "'WS-C3210X-48T-S', 'sn': 'FD5678Z90P', 'subslot': { '2': { 'C3KX-PWR-007CBA': { 'descr':", "= {'execute.return_value': '''\\ ROM: Bootstrap program is IOSv '''} golden_parsed_output_iosv", "\"size\": \"524288\", \"permissions\": \"-rw-\" }, \"boot\": { \"last_modified_date\": \"Jan 30", "\"Jan 30 2013 00:00:00 +00:00\", \"index\": \"1\", \"size\": \"0\", \"permissions\":", "EtherSwitch\" PID: NM-16ESW , VID: V01 , SN: FOC135464KO NAME:", "\"WS-C6503-E-FAN\", \"vid\": \"V02\", \"sn\": \"DCH183500KW\", } } }, \"PS 1", "', }, }, '1': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 -", "16 hours, 42 minutes\", \"config_register\": \"0x0\", \"curr_sw_state\": \"ACTIVE\" } }", "obj.parse() def test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj", "{ 'descr': 'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR', 'sn': 'SPC1519005V', 
'vid':", "{ \"five_sec_cpu_total\": 13, \"five_min_cpu\": 15, \"one_min_cpu\": 23, \"five_sec_cpu_interrupts\": 0 }", "Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0', 'name': 'IOSv', 'pid': 'IOSv', 'sn':", "ethernet MAC Address : AC:F2:C5:FF:55:E7 Motherboard assembly number : 73-13061-04", "\"PIM Process\", \"five_min_cpu\": 0.48, \"runtime\": 362874, \"pid\": 368, \"five_sec_cpu\": 1.03", "Speed WAN Interface Card on Slot 0 SubSlot 3', 'pid':", "'1', 'pid': 'SM-ES2-16-P', 'sn': 'FOC09876NP3', 'vid': '', }, }, },", "'sn': 'QCS1604P0BT', 'vid': 'V03 ', }, }, }, }, }", "\"4\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4\",", "16 minutes Time since cat6k_tb1 switched to active is 10", "of 1\", \"pid\": \"WS-F6K-PFC3BXL\", \"sn\": \"SAL11434LYG\", \"vid\": \"V01\", }, }", "bytes of non-volatile configuration memory. 2000880K bytes of ATA System", "= ShowSwitchDetail(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) if __name__ == '__main__':", "PWR-2700-AC/4\": { \"other\": { \"PS 2 PWR-2700-AC/4\": { \"name\": \"PS", "V07 CLEI Code Number : CMMPP00DRB Hardware Board Revision Number", "= platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def test_empty(self): self.device =", "'name': 'Switch 1 - FlexStackPlus Module', 'pid': 'C1010X-STACK', 'sn': 'FD232323XXZ',", "'FOC16050QP6' } } }, 'license_package': { 'data': { 'license_level': 'datak9',", "'Permanent', 'next_reload_license_level': 'ipbasek9', }, 'security': { 'license_level': 'securityk9', 'license_type': 'Permanent',", "{'version_short': '15.2', 'platform': 'C3750E', 'version': '15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M', 'os': 'IOS',", "264 drw- 0 Oct 14 2013 00:00:00 +00:00 config 267", ", VID: V03, SN: FXS1821Q2H9 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600", "'V02L ', }, }, '1/0/49': { 
'GLC-SX-MMD': { 'descr': '1000BaseSX", "to <EMAIL>. Cisco IOSv (revision 1.0) with with 435457K/87040K bytes", "VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC65428K9F NAME: \"Wan Interface", "1990 by Meridian Technology Corp). X.25 software, Version 3.0.0. Bridging", "FOC63358WSI NAME: \"High Density Voice Module - 8FXS/DID on Slot", "\"size\": \"0\", \"permissions\": \"drw-\" }, \"vios-adventerprisek9-m\": { \"last_modified_date\": \"Mar 29", "10Gbase-SR Te2/16\", \"name\": \"Transceiver Te2/16\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170201TT\", \"vid\":", "Flash internal SIMM (Sector size 512K). Configuration register is 0x2102", "self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj", "is 1 day, 16 hours, 42 minutes System returned to", "'subslot': { '0': { 'GE-DCARD-ESW': { 'descr': 'Gigabit(1000BaseT) module for", "{ \"WS-F6K-DFC4-A\": { \"descr\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev.", "'processor_board_flash': '2000880K', 'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47 EST Mon Dec 9", "def test_golden(self): self.device = Mock(**self.golden_output_serdes_internal) obj = ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output =", "'pid': 'EM-HDA-6FXO', 'sn': 'FOC85389QXB', 'vid': 'V03 ', }, }, },", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_subslot) def test_golden_slot_internal(self): self.device = Mock(**self.golden_output_slot_internal)", "voice interface daughtercard\" PID: EM-HDA-6FXO , VID: V03 , SN:", "reload type: Normal Reload Last reload reason: Reload Command This", "{ 'descr': 'SM-ES2-16-P', 'name': '1', 'pid': 'SM-ES2-16-P', 'sn': 'FOC09876NP3', 'vid':", "', }, }, }, 'vid': 'V05 ', }, }, },", "3 days, 10 hours, 27 minutes System returned to ROM", "'C3900-SPE150/K9', 'sn': 
'FOC16050QP6', 'subslot': { '3': { 'HWIC-2FE': { 'descr':", "'vid': 'V07 ', }, }, }, 'slot': { '0': {", "type: Normal Reload Last reload reason: Reload Command This product", "'prod_rel_team', 'compiled_date': 'Fri 05-Aug-11 00:32', 'curr_config_register': '0x2102', 'hostname': 'best-c3945-IOS3', 'image_id':", "\"dir\": { \"flash0:/\": { \"files\": { \"e1000_bia.txt\": { \"last_modified_date\": \"Oct", "test_golden_ios_1(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_1) version_obj = ShowVersion(device=self.dev_iosv)", "= {'execute.return_value': ''' best-c3945-IOS3#show inventory NAME: \"CISCO3945-CHASSIS\", DESCR: \"CISCO3945-CHASSIS\" PID:", "\"X2-10GB-SR\", \"sn\": \"ONT170202T5\", \"vid\": \"V06 \", } }, \"5\": {", "SAL17152N0F NAME: \"msfc sub-module of 1\", DESCR: \"VS-F6K-MSFC5 CPU Daughterboard", "parsed_output = obj.parse(slot='0') class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def test_empty(self): self.device = Mock(**self.empty_output)", "\"image_id\": \"VIOS-ADVENTERPRISEK9-M\", 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 29-Mar-17 14:05', \"processor_type\": \"revision", "class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev = Device(name='c3850') empty_output = {'execute.return_value': ''} golden_parsed_output", "power-on System restarted at 05:06:40 GMT Tue Sep 10 2019", "SN: FXS1712Q1R8 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600 Clock FRU 1\"", "}, }, }, }, } golden_output_5 = {'execute.return_value': ''' best-c3945-IOS3#show", "WAN\" PID: NM-1T3/E3= , VID: V01 , SN: FOC28476ADM NAME:", "Slot 0 SubSlot 1\", DESCR: \"Wan Interface Card BRI U", "PID: WS-F6K-PFC3BXL , VID: V01, SN: SAL11434LYG NAME: \"2\", DESCR:", "'LTP13579L3R', 'vid': 'V01L ', }, }, '2/1/1': { 'SFP-10G-LR': {", "'0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex", "12.2(18)SXF7, RELEASE SOFTWARE (fc1)\", \"hostname\": \"cat6k_tb1\", 
\"uptime\": \"10 weeks, 5", "with four DSPs', 'name': 'PVDMII DSP SIMM with four DSPs", "ShowProcessesCpu(device=self.device) parsed_output = obj.parse(key_word='process') self.assertEqual(parsed_output, self.golden_parsed_output_1) def test_empty(self): self.device1 =", "PID: CISCO3825 , VID: V05 , SN: FTX7908A3RQ NAME: \"VWIC2-2MFT-T1/E1", "FOC65428K9F NAME: \"Wan Interface Card BRI U (2091, 3086) on", "Mock(**self.golden_output_c3850) platform_obj = ShowPlatform(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_c3850) def", "\"five_min_cpu\": 15, \"one_min_cpu\": 23, \"five_sec_cpu_interrupts\": 0 } golden_output = {'execute.return_value':", "sample_switch uptime is 8 weeks, 3 days, 10 hours, 27", "= ShowInventory(device=self.dev_iosv) parsed_output = inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_output_2(self): self.maxDiff", "board ID FDO2028F1WK Last reset from power-on 2 Virtual Ethernet", "NAME: \"msfc sub-module of 1\", DESCR: \"WS-SUP720 MSFC3 Daughterboard Rev.", "V06 , SN: ONT170202UU NAME: \"Transceiver Te2/4\", DESCR: \"X2 Transceiver", "class test_show_platform(test_show_platform_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowPlatform(device=self.dev1)", "self.dev_iosv = Mock(**self.golden_output_iosv) platform_obj = ShowBootvar(device=self.dev_iosv) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,", "Version ID : V07 CLEI Code Number : CMMPP00DRB Hardware", "10BaseT/100BaseTX EtherSwitch\" PID: NM-16ESW , VID: V01 , SN: FOC135464KO", "with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def test_empty(self): self.device", "'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR', 'sn': 'ONT182746GZ', 'vid': 'V02 ', }, },", "five 
minutes: 9% PID Runtime(ms) Invoked uSecs 5Sec 1Min 5Min", "2018 18:57:18 +00:00\", \"index\": \"269\", \"size\": \"119\", \"permissions\": \"-rw-\" },", "disabled. 256K bytes of non-volatile configuration memory. 2097152K bytes of", "configuration is 72 bits wide with parity enabled. 255K bytes", "CTS Rev. 1.5\" PID: VS-SUP2T-10G , VID: V05, SN: SAL17152N0F", "EARL sub-module of 1\", \"pid\": \"VS-F6K-PFC4\", \"sn\": \"SAL17163901\", \"vid\": \"V03\",", "to ROM by reload at 10:26:47 EST Mon Dec 9", "SOFTWARE (fc2)\", \"uptime_in_curr_state\": \"1 day, 16 hours, 42 minutes\", \"config_register\":", "Mock(**self.empty_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = cpu_platform_obj.parse() class", "RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1',", "VID: V01 , SN: FOC28476ADM NAME: \"16 Port 10BaseT/100BaseTX EtherSwitch", "us by sending email to <EMAIL>. Cisco CISCO3945-CHASSIS (revision 1.1)", "'power-on', 'system_restarted_at': '05:06:40 GMT Tue Sep 10 2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin',", "= 0 minutes Switchovers system experienced = 0 Standby failures", "} }, \"3\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR", "test_golden_output_5(self): self.maxDiff = None self.device = Mock(**self.golden_output_5) obj = ShowInventory(device=self.device)", "ShowPlatformHardwareSerdes,\\ ShowPlatformHardwareSerdesInternal,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\ ShowPlatformHardwareQfpStatisticsDrop,\\ ShowEnvironment,\\ ShowModule,\\ ShowSwitch, ShowSwitchDetail from", "self.assertEqual(parsed_output, self.golden_parsed_output_active) class test_show_env(test_show_env_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj", "test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with 
self.assertRaises(SchemaEmptyParserError): parsed_output", "chassis\" PID: CISCO3825 , VID: V05 , SN: FTX7908A3RQ NAME:", "'C1010X-STACK': { 'descr': 'Stacking Module', 'name': 'Switch 1 - FlexStackPlus", "5 days, 5 hours, 15 minutes System returned to ROM", "is 8 weeks, 3 days, 10 hours, 27 minutes System", "Mock(**self.golden_output_5) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5) def", "\"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4\", \"pid\":", "'sn': 'LIT03728KKK', 'vid': 'V02L ', }, }, '1/0/49': { 'GLC-SX-MMD':", "= 0x0 Peer (slot: 0) information is not available because", "10Gbase-SR Te2/6\", \"name\": \"Transceiver Te2/6\", \"pid\": \"X2-10GB-SR\", \"sn\": \"FNS153920YJ\", \"vid\":", "(fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2018 by Cisco Systems,", "\"name\": \"PS 1 PWR-2700-AC/4\", \"descr\": \"2700W AC power supply for", "'C3750E', 'processor_type': 'PowerPC405', 'returned_to_rom_by': 'power-on', 'rom': 'Bootstrap program is C3750E", "{ 'descr': 'Gigabit(1000BaseT) module for EtherSwitch NM', 'name': 'Gigabit(1000BaseT) module", "Model revision number : A0 Motherboard revision number : A0", "{ \"Gigabit Ethernet/IEEE 802.3\": \"50\", 'Virtual Ethernet/IEEE 802.3': '1' },", "X2-10GB-SR , VID: V06 , SN: FNS153920YJ NAME: \"Transceiver Te2/16\",", "Te1/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/5\" PID: X2-10GB-SR , VID:", "1\" PID: WS-C6503-E-FAN , VID: V02, SN: DCH183500KW NAME: \"PS", "= None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowModule(device=self.dev_c3850) parsed_output =", "self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_iosv", "\"version\": { \"last_reload_reason\": \"Unknown reason\", \"hostname\": \"N95_1\", \"os\": \"IOS\", \"version_short\":", "'IOSv': { 'descr': 'IOSv chassis, Hw 
Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision:", "test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) redundancy_obj = ShowRedundancy(device=self.dev_iosv)", "'name': 'C3900 AC Power Supply 1', 'pid': 'PWR-3900-AC', 'sn': 'QCS1604P0BT',", "Technology-package Current Type Next reboot ------------------------------------------------------------------ ipbase ipbasek9 Permanent ipbasek9", "'HWIC-2FE': { 'descr': 'Two-Port Fast Ethernet High Speed WAN Interface", "}, '1': { 'other': { 'EVM-HD-8FXS/DID': { 'descr': 'High Density", "ROM by power-on System restarted at 05:06:40 GMT Tue Sep", "parsed_output = obj.parse() class test_show_version_rp(test_show_version_rp_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active)", "on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F', 'vid':", "'pid': 'CISCO3845', 'sn': 'FTX6666ARJ9', 'vid': 'V05 ', }, }, },", "}, 'slot': { '0': { 'lc': { 'CISCO3845-MB': { 'descr':", "Version ID : V03 CLEI Code Number : CMMFF00ARC Hardware", "\"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2\", \"pid\":", "CPU utilization for five seconds: 4%/0%; one minute: 4%; five", "'rom': 'Bootstrap program is C3750E boot loader', 'rtr_type': 'WS-C3750X-24P', 'system_image':", "'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47 EST Mon Dec 9 2019', 'returned_to_rom_by': 'reload',", "\"WS-C6503-E-FAN 1\", \"descr\": \"Enhanced 3-slot Fan Tray 1\", \"pid\": \"WS-C6503-E-FAN\",", "C3KX-PWR-007CBA , VID: V01L , SN: LTP13579L3R NAME: \"TenGigabitEthernet2/1/1\", DESCR:", "'''} golden_parsed_output_9 = { 'main': { 'chassis': { 'CISCO3845': {", "}, 'number_of_intfs': { 'FastEthernet': '2', 'Gigabit Ethernet': '3', }, 'os':", "High Speed WAN Interface Card\" PID: HWIC-2FE , VID: V02", "memory. 
2097152K bytes of ATA System CompactFlash 0 (Read/Write) 0K", "{ \"name\": \"PS 1 PWR-1400-AC\", \"descr\": \"AC power supply, 1400", "Mock(**self.golden_output_port) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(port='0/0/0') self.maxDiff = None", "'returned_to_rom_by': 'power-on', 'system_restarted_at': '05:06:40 GMT Tue Sep 10 2019', 'system_image':", "NAME: \"1\", DESCR: \"SM-ES2-16-P\" PID: SM-ES2-16-P , VID: , SN:", "self.assertEqual(parsed_output, self.golden_parsed_output_serdes) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdes(device=self.device1)", "} }, } } }, \"WS-C6503-E-FAN 1\": { \"other\": {", "'chassis': 'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK', 'curr_config_register': '0xF', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed", "Power Supply', 'name': 'Switch 2 - Power Supply 1', 'pid':", "4 EARL sub-module of 3\", \"pid\": \"WS-F6K-DFC4-A\", \"sn\": \"SAL171848KL\", \"vid\":", "MSFC3 Daughterboard Rev. 3.1\", \"name\": \"msfc sub-module of 1\", \"pid\":", "'descr': 'Six port FXO voice interface daughtercard', 'name': 'Six port", "\"0x2012\" }, \"next_reload_boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\" } golden_output_iosv = {'execute.return_value': '''\\ BOOT", "NM', 'name': 'Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2", "'WS-C0123X-45T-S': { 'descr': 'WS-C8888X-88', 'name': '1', 'pid': 'WS-C0123X-45T-S', 'sn': 'FDO123R12W',", "\", } }, \"4\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver", "PWR-2700-AC/4 , VID: V03, SN: APS17070093 '''} golden_parsed_output_3 = {", "1986-2006 by cisco Systems, Inc. 
Compiled Thu 23-Nov-06 06:26 by", "}, }, 'slot': { '0': { 'rp': { 'C3900-SPE150/K9': {", "reset from power-on 14 Virtual Ethernet interfaces 1 FastEthernet interface", "import SchemaEmptyParserError,\\ SchemaMissingKeyError from genie.libs.parser.ios.show_platform import ShowVersion,\\ Dir,\\ ShowRedundancy,\\ ShowInventory,\\", "test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowProcessesCpuHistory(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output", "{ \"name\": \"1\", \"descr\": \"WS-SUP720-3BXL 2 ports Supervisor Engine 720", "Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0', 'name': 'IOSv', 'pid': 'IOSv',", "'descr': 'WS-C8888X-88', 'name': '1', 'pid': 'WS-C0123X-45T-S', 'sn': 'FDO123R12W', 'subslot': {", ", VID: V05, SN: SAL17152N0F NAME: \"msfc sub-module of 1\",", "1', 'pid': 'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R', 'vid': 'V01L ', }, },", ", VID: V01 , SN: FOC98675U0D NAME: \"VWIC2-2MFT-T1/E1 - 2-Port", "'descr': '2821 chassis', 'name': '2821 chassis', 'pid': 'CISCO2821', 'sn': 'FTX1234AMWT',", "'main': { 'chassis': { 'CISCO3825': { 'descr': '3825 chassis', 'name':", "27 minutes', 'returned_to_rom_by': 'power-on', 'system_restarted_at': '05:06:40 GMT Tue Sep 10", "at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us", "FOC91864MNN '''} golden_parsed_output_9 = { 'main': { 'chassis': { 'CISCO3845':", "hours, 42 minutes System returned to ROM by reload System", "= ShowBootvar(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff", "Forwarding Card Rev. 4.1\" PID: WS-F6700-CFC , VID: V06, SN:", "{ 'main': { 'chassis': { 'IOSv': { 'descr': 'IOSv chassis,", "port 10GE Rev. 
2.0\" PID: WS-X6816-10GE , VID: V02, SN:", "= None self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.device", "interfaces 2 Ten Gigabit Ethernet interfaces The password-recovery mechanism is", "0', 'pid': 'AIM-VPN/SSL-2', 'sn': 'FOC2837465E', 'vid': 'V01', 'subslot': { '0':", "\"name\": \"Transceiver Te2/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T5\", \"vid\": \"V06 \",", "Mock(**self.empty_output) platform_obj = ShowSwitchDetail(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def", "}, 'security': { 'license_level': 'securityk9', 'license_type': 'Permanent', 'next_reload_license_level': 'securityk9', },", "self.golden_parsed_output_c3850) def test_golden_asr1k(self): self.maxDiff = None self.dev_asr1k = Mock(**self.golden_output_asr1k) platform_obj", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3) def test_golden_output_4(self): self.maxDiff = None self.device =", "NAME: \"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3\",", "Version 15.0(1)M7, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c)", "Feature Card 3 Rev. 
1.8\", \"name\": \"switching engine sub-module of", "Version 15.2(2)E8, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c)", "'''} golden_output_ios_1 = {'execute.return_value': '''\\ Cisco IOS Software, C3750E Software", "self.assertEqual(parsed_output, self.golden_parsed_output_c3850) def test_golden_asr1k(self): self.maxDiff = None self.dev_asr1k = Mock(**self.golden_output_asr1k)", "Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)', 'rtr_type': 'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin',", "Mock(**self.golden_output_active_opm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0',", "self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output", "\"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"2\", \"descr\": \"WS-X6748-GE-TX CEF720 48", "inventory_obj = ShowInventory(device=self.dev_iosv) parsed_output = inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_output_2(self):", "BRI U (2091, 3086)\" PID: WIC-1B-U-V2 , VID: V01, SN:", "'system_restarted_at': '12:22:21 PDT Mon Sep 10 2018', 'uptime': '9 weeks,", "SAL14017TWF NAME: \"WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4\",", "3 Rev. 1.8\" PID: WS-F6K-PFC3BXL , VID: V01, SN: SAL11434LYG", "System restarted at 05:06:40 GMT Tue Sep 10 2019 System", "} } golden_output_iosv = {'execute.return_value': '''\\ Redundant System Information :", "C3750E boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE,", "}, \"PS 1 PWR-1400-AC\": { \"other\": { \"PS 1 PWR-1400-AC\":", "1.0) with with 435457K/87040K bytes of memory. 
Processor board ID", "---------- ---------- * 1 30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration register", "= Mock(**self.empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(AttributeError): parsered_output = version_obj.parse()", "Transceiver 10Gbase-SR Te2/5\" PID: X2-10GB-SR , VID: V05 , SN:", "ROM: System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1) best-c3945-IOS3 uptime", "}, }, }, } golden_output_5 = {'execute.return_value': ''' best-c3945-IOS3#show inventory", "minutes: 9% PID Runtime(ms) Invoked uSecs 5Sec 1Min 5Min TTY", "Gigabit Ethernet interfaces 1 Virtual Private Network (VPN) Module DRAM", "1.5\", \"pid\": \"VS-SUP2T-10G\", \"vid\": \"V05\", \"sn\": \"SAL17152N0F\", \"subslot\": { \"0\":", "{ \"WS-SUP720-3BXL\": { \"name\": \"1\", \"descr\": \"WS-SUP720-3BXL 2 ports Supervisor", "of 2\", \"pid\": \"WS-F6700-DFC3CXL\", \"sn\": \"SAL1214LAG5\", \"vid\": \"V01\", } }", "ipbase ipbasek9 Permanent ipbasek9 security securityk9 Permanent securityk9 uc None", "'mem_size': { 'flash-simulated non-volatile configuration': '512' }, 'curr_config_register': '0xF' }", "serial number : FDO1633Q14S Top Assembly Part Number : 800-33746-04", "0.07% 0 OSPF-1 Hello '''} def test_empty(self): self.dev = Mock(**self.empty_output)", "with parity enabled. 255K bytes of non-volatile configuration memory. 2000880K", "by sending email to <EMAIL>. 
cisco WS-C6503-E (R7000) processor (revision", "VID: , SN: FOC09876NP3 '''} golden_parsed_output_6 = { 'slot': {", ": FDO202907UH Model revision number : W0 Motherboard revision number", "for EtherSwitch NM on Slot 2 SubSlot 0', 'pid': 'GE-DCARD-ESW',", "''' # show inventory NAME: \"WS-C6503-E\", DESCR: \"Cisco Systems Catalyst", "for EtherSwitch NM\" PID: GE-DCARD-ESW , VID: V01 , SN:", "\"tty\": 0, \"one_min_cpu\": 0.07, \"process\": \"OSPF-1 Hello\", \"five_min_cpu\": 0.07, \"runtime\":", "'descr': 'Stacking Module', 'name': 'Switch 1 - FlexStackPlus Module', 'pid':", "Mock(**self.golden_output_active) obj = ShowPlatformHardware(device=self.device) parsed_output = obj.parse() self.maxDiff = None", "engine sub-module of 2\", \"pid\": \"WS-F6700-DFC3CXL\", \"sn\": \"SAL1214LAG5\", \"vid\": \"V01\",", "'0xF' } } device_output = {'execute.return_value':''' best-c3945-IOS3#show version Cisco IOS", "\"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/3\", \"name\": \"Transceiver Te2/3\",", "\"five_sec_cpu\": 0.15 } }, \"five_sec_cpu_total\": 4, \"five_min_cpu\": 9, \"one_min_cpu\": 4,", "obj = ShowEnvironment(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_module(test_show_module_iosxe):", "for CISCO7604 1\" PID: PWR-2700-AC/4 , VID: V03, SN: APS1707008Y", "AC power supply for CISCO7604 1\" PID: PWR-2700-AC/4 , VID:", "'V01 ', }, }, }, 'vid': 'V05 ', }, },", "'2027520', 'mem_size': { 'non-volatile configuration': '255', }, 'number_of_intfs': { 'FastEthernet':", "reload license Level: ipservices cisco WS-C3750X-24P (PowerPC405) processor (revision W0)", "golden_output_iosv = {'execute.return_value': '''\\ Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M),", "\"chassis\": \"WS-C6503-E\", \"main_mem\": \"983008\", \"processor_type\": \"R7000\", 'sp_by': 'power on', 'returned_to_rom_at':", "SN: ABC0830J127 '''} golden_output_4 = {'execute.return_value': ''' NAME: \"1\", DESCR:", 
"1\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1\"", "Module on Slot 0', 'pid': 'AIM-VPN/SSL-2', 'sn': 'FOC2837465E', 'vid': 'V01',", "'Switch 2 - Power Supply 1', 'pid': 'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R',", "'production image', 'last_reload_reason': 'Reload Command', 'last_reload_type': 'Normal Reload', 'license_udi': {", "'last_reload_reason': 'Reload Command', 'last_reload_type': 'Normal Reload', 'license_udi': { 'device_num': {", "NAME: \"3\", DESCR: \"WS-X6824-SFP CEF720 24 port 1000mb SFP Rev.", "NAME: \"Clear/Subrate T3/E3 WAN on Slot 1\", DESCR: \"Clear/Subrate T3/E3", "{ \"dir\": { \"flash0:/\": { \"files\": { \"e1000_bia.txt\": { \"last_modified_date\":", "bytes of memory. Processor board ID FDO1633Q14S Last reset from", "= None self.device = Mock(**self.golden_output) platform_obj = ShowPlatformPower(device=self.device) parsed_output =", "minutes System returned to ROM by reload System image file", "Slot 0', 'pid': 'CISCO3845-MB', 'sn': 'FOC729346GQ', 'vid': 'V09 ', },", "Te2/6\" PID: X2-10GB-SR , VID: V06 , SN: FNS153920YJ NAME:", "{ \"descr\": \"X2 Transceiver 10Gbase-SR Te2/1\", \"name\": \"Transceiver Te2/1\", \"pid\":", "Processor board ID 9K66Z7TOKAACDEQA24N7S 6 Gigabit Ethernet interfaces DRAM configuration", "1000mb SFP Rev. 1.0\", \"pid\": \"WS-X6824-SFP\", \"vid\": \"V01\", \"sn\": \"SAL17152EG9\",", "14:05 by prod_rel_team ROM: Bootstrap program is IOSv N95_1 uptime", "Density Voice Module - 8FXS/DID\" PID: EVM-HD-8FXS/DID , VID: V04", "'license_level': 'ipservices', 'license_type': 'Permanent', 'next_reload_license_level': 'ipservices', 'chassis': 'WS-C3750X-24S', 'main_mem': '524288',", "SN: FDO123R12W NAME: \"Switch 1 - Power Supply 1\", DESCR:", "(revision 1.0) with with 435457K/87040K bytes of memory. 
Processor board", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_module(test_show_module_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output)", "comply with U.S. and local laws, return this product immediately.", "\"image_type\": \"production image\", 'processor_board_flash': '10080K', 'returned_to_rom_by': 'reload', \"main_mem\": \"435457\", \"mem_size\":", "self.device = Mock(**self.golden_output_4) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,", "test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardware(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "\"vid\": \"\", }, \"VS-F6K-PFC4\": { \"descr\": \"VS-F6K-PFC4 Policy Feature Card", "obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8) def test_golden_output_9(self):", "{ \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/6\", \"name\": \"Transceiver", "on Slot 0', 'pid': 'AIM-VPN/SSL-2', 'sn': 'FOC2837465E', 'vid': 'V01', 'subslot':", "self.maxDiff = None self.device = Mock(**self.golden_output_8) obj = ShowInventory(device=self.device) parsed_output", "800-32727-03 Daughterboard serial number : FDO202823P8 System serial number :", "Dir(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsered_output = dir_obj.parse() def test_semi_empty(self): self.dev1 =", ", VID: V06 , SN: ONT17020338 NAME: \"Transceiver Te2/2\", DESCR:", "= ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(port='0/0/0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_port)", "{ \"0\": { \"WS-SUP720\": { \"descr\": \"WS-SUP720 MSFC3 Daughterboard Rev.", "''' NAME: \"2821 chassis\", DESCR: \"2821 chassis\" PID: CISCO2821 ,", "\"flash:c3750e-universalk9-mz\" This product contains cryptographic features and is subject 
to", "days, 10 hours, 27 minutes', 'returned_to_rom_by': 'power-on', 'system_restarted_at': '05:06:40 GMT", "\"processor_type\": \"revision 1.0\", \"platform\": \"IOSv\", \"image_type\": \"production image\", 'processor_board_flash': '10080K',", "2.0\" PID: VS-F6K-MSFC5 , VID: , SN: SAL17142D06 NAME: \"VS-F6K-PFC4", "\"packet buffer\": \"8192\"}, \"curr_config_register\": \"0x2102\", } } golden_output_ios_cat6k = {'execute.return_value':", "1\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", } } },", "parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.device =", "1.0\" PID: WS-X6824-SFP , VID: V01, SN: SAL17152EG9 NAME: \"WS-F6K-DFC4-A", "http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by", "FD2043B0K3 NAME: \"Switch 1 - Power Supply 1\", DESCR: \"LLL", "golden_output_iosv = {'execute.return_value': '''\\ Directory of flash0:/ 1 drw- 0", "reload System image file is \"flash0:/vios-adventerprisek9-m\" Last reload reason: Unknown", "supply for CISCO7604 1\" PID: PWR-2700-AC/4 , VID: V03, SN:", "802.3\": \"50\", 'Virtual Ethernet/IEEE 802.3': '1' }, \"mem_size\": {\"non-volatile configuration\":", "golden_output_iosv = {'execute.return_value': '''\\ BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable", "Voice Module - 8FXS/DID\" PID: EVM-HD-8FXS/DID , VID: V04 ,", "(SP by power on) System image file is \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\" This", "\"X2-10GB-SR\", \"sn\": \"ONT170201TT\", \"vid\": \"V06 \", } }, }, }", "self.golden_parsed_output_7) def test_golden_output_8(self): self.maxDiff = None self.device = Mock(**self.golden_output_8) obj", "10Gbase-SR Te1/5\" PID: X2-10GB-SR , VID: V06 , SN: ONT1702033D", "at 21:57:23 UTC Sat Aug 28 2010 (SP by power", "'Gigabit Ethernet': '28', 'FastEthernet': '1' }, 'os': 'IOS', 'platform': 'C3750E',", "{'execute.return_value': ''' NAME: \"1\", DESCR: 
\"SM-ES2-16-P\" PID: SM-ES2-16-P , VID:", "def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError):", "\"X2 Transceiver 10Gbase-SR Te2/3\", \"name\": \"Transceiver Te2/3\", \"pid\": \"X2-10GB-SR\", \"sn\":", "parsed_output = platform_obj.parse(status='active') self.assertEqual(parsed_output, self.golden_parsed_output_active) class test_show_env(test_show_env_iosxe): def test_empty(self): self.dev", "and users are responsible for compliance with U.S. and local", "Power Supply', 'name': 'Switch 1 - Power Supply 1', 'pid':", ", VID: V01 , SN: FOC65428K9F NAME: \"Wan Interface Card", "4, \"five_min_cpu\": 9, \"one_min_cpu\": 4, \"nonzero_cpu_processes\": [ \"PIM Process\", \"IOSv", "show version Cisco Internetwork Operating System Software IOS (tm) s72033_rp", "Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) cat6k_tb1 uptime is 10 weeks,", "= version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_ios(self): self.maxDiff = None self.dev_iosv", ": A0 Motherboard revision number : A0 Model number :", "Permanent Next reload license Level: ipservices cisco WS-C3750X-24P (PowerPC405) processor", "\"rom\": \"System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)\", \"bootldr\": \"s72033_rp", "with four DSPs on Slot 0 SubSlot 4', 'pid': 'PVDM2-64',", "VID: V05 , SN: FOC16050QP6 NAME: \"Two-Port Fast Ethernet High", "None self.device = Mock(**self.golden_output_5) obj = ShowInventory(device=self.device) parsed_output = obj.parse()", "ShowSwitchDetail(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) if __name__ == '__main__': unittest.main()", ", VID: V07 , SN: FTX1234AMWT NAME: \"VWIC2-2MFT-T1/E1 - 2-Port", "- T1/E1 on Slot 0 SubSlot 1', 'pid': 'VWIC2-2MFT-T1/E1', 'sn':", "Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE SOFTWARE (fc1) 
Technical", "10 2018', 'uptime': '9 weeks, 4 days, 2 hours, 3", "\"WS-F6700-CFC\": { \"descr\": \"WS-F6700-CFC Centralized Forwarding Card Rev. 4.1\", \"name\":", "ipservices License Type: Permanent Next reload license Level: ipservices cisco", "Reload Last reload reason: Reload Command This product contains cryptographic", "CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4\", \"pid\": \"WS-X6748-GE-TX\", \"vid\":", "'chassis': { 'CISCO3845': { 'descr': '3845 chassis', 'name': '3845 chassis',", "test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device)", "{ \"0\": { \"VS-F6K-MSFC5\": { \"descr\": \"VS-F6K-MSFC5 CPU Daughterboard Rev.", "Number : B0 Version ID : V03 CLEI Code Number", "802.3 interface 50 Gigabit Ethernet/IEEE 802.3 interfaces 1917K bytes of", "= ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='opm') self.assertEqual(parsed_output,", "2.0\", \"name\": \"VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of", "'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1', 'name':", "}, }, }, 'slot': { '0': { 'rp': { 'CISCO3825':", "\"name\": \"msfc sub-module of 1\", \"pid\": \"WS-SUP720\", \"sn\": \"SAL11434N9G\", \"vid\":", "enabled. 255K bytes of non-volatile configuration memory. 
2000880K bytes of", "self.device = Mock(**self.golden_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device) parsed_output = cpu_platform_obj.parse() self.maxDiff", "{ 'SFP-10G-LR': { 'descr': 'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR', 'sn':", "ShowPlatform(device=self.dev_asr1k) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_asr1k) class test_show_platform_power(test_show_platform_power_iosxe): def test_empty(self):", "{'execute.return_value': ''' NAME: \"2821 chassis\", DESCR: \"2821 chassis\" PID: CISCO2821", "revision number : A0 Model number : WS-C3750X-24S-E Daughterboard assembly", "test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowEnvironment(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output", "10GE w/ CTS Rev. 1.5\" PID: VS-SUP2T-10G , VID: V05,", "20 minutes', 'version': '15.0(1)M7', 'version_short': '15.0', }, } def test_empty(self):", "\"PS 1 PWR-1400-AC\", \"descr\": \"AC power supply, 1400 watt 1\",", "self.golden_parsed_output_3) def test_golden_output_4(self): self.maxDiff = None self.device = Mock(**self.golden_output_4) obj", "chassis\", DESCR: \"3845 chassis\" PID: CISCO3845 , VID: V05 ,", "'Clear/Subrate T3/E3 WAN', 'name': 'Clear/Subrate T3/E3 WAN on Slot 1',", "ShowInventory(device=self.dev_iosv) parsed_output = inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_output_2(self): self.maxDiff =", "Hello\", \"five_min_cpu\": 0.07, \"runtime\": 113457, \"pid\": 412, \"five_sec_cpu\": 0.15 }", "Software state = ACTIVE Uptime in current state = 1", "self.dev_iosv = Mock(**self.golden_output_iosv) inventory_obj = ShowInventory(device=self.dev_iosv) parsed_output = inventory_obj.parse() self.assertEqual(parsed_output,", "= Mock(**self.device_output) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() 
self.assertEqual(parsed_output, self.parsed_output)", "'sn': 'ACW102938VS', 'vid': 'V01 ', }, }, }, 'vid': 'V05", "RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2011 by", "and local country laws governing import, export, transfer and use.", "}, }, '4': { 'PVDM2-64': { 'descr': 'PVDMII DSP SIMM", "FGL161010K8 2 FastEthernet interfaces 3 Gigabit Ethernet interfaces 1 Virtual", "T1/E1 on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D',", "= None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitch(device=self.dev_c3850) parsed_output =", "Element\" PID: AIM-VPN/SSL-3 , VID: V01, SN: FOC758693YO NAME: \"Clear/Subrate", "as test_show_module_iosxe,\\ TestShowSwitch as test_show_switch_iosxe,\\ TestShowSwitchDetail as test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase):", "self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitch(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "FXS1821Q2H9 SR71000 CPU at 600Mhz, Implementation 0x504, Rev 1.2, 512KB", "Clock FRU 1\" PID: CLK-7600 , VID: , SN: FXS170802GL", "(revision W0) with 262144K bytes of memory. 
Processor board ID", "'license_level': 'ipservices', 'license_type': 'Permanent', 'main_mem': '262144', 'mem_size': {'flash-simulated non-volatile configuration':", "System image file is \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\" This product contains cryptographic features", "ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\ ShowPlatformHardwareQfpStatisticsDrop,\\ ShowEnvironment,\\ ShowModule,\\ ShowSwitch, ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform import", "\"Transceiver Te2/6\", \"pid\": \"X2-10GB-SR\", \"sn\": \"FNS153920YJ\", \"vid\": \"V06 \", }", "SN: SAL17152N0F NAME: \"msfc sub-module of 1\", DESCR: \"VS-F6K-MSFC5 CPU", "as test_show_platform_hardware_serdes_statistics_iosxe,\\ TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\ ShowPlatformHardwareQfpInterface as", "System image file is \"flash0:/vios-adventerprisek9-m\" Last reload reason: Unknown reason", "Wed 29-Mar-17 14:05 by prod_rel_team ROM: Bootstrap program is IOSv", "Ethernet': '2', 'Virtual Ethernet': '2', 'Gigabit Ethernet': '28', 'FastEthernet': '1'", "sending email to <EMAIL>. Cisco IOSv (revision 1.0) with with", "\"sn\": \"ONT17020338\", \"vid\": \"V06 \", } }, \"2\": { \"X2-10GB-SR\":", "RJ-48 Multiflex Trunk - T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID: V01", "Daughterboard Rev. 
2.0\" PID: VS-F6K-MSFC5 , VID: , SN: SAL17142D06", "\"X2 Transceiver 10Gbase-SR Te2/2\", \"name\": \"Transceiver Te2/2\", \"pid\": \"X2-10GB-SR\", \"sn\":", "\"sn\": \"SAL171846RF\", \"vid\": \"V02\", } }, \"1\": { \"X2-10GB-SR\": {", "FTX1234AMWT NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1", "slot='0', iotype='ipm') def test_golden_active_ipm(self): self.maxDiff = None self.device = Mock(**self.golden_output_active_ipm)", "5.6\" PID: WS-SUP720-3BXL , VID: V05, SN: SAL11434P2C NAME: \"msfc", "'FGL161010K8', 'vid': 'V05 ', }, }, }, 'slot': { '0':", "- Power Supply 1', 'pid': 'PWR-C2-2929WAC', 'sn': 'LIT03728KKK', 'vid': 'V02L", "10 weeks, 5 days, 5 hours, 15 minutes System returned", "\"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", } } }, \"CLK-7600 2\":", "{ 'WIC-1B-U-V2': { 'descr': 'Wan Interface Card BRI U (2091,", "self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device)", "'sn': 'SPC1519005V', 'vid': 'V03 ', }, }, }, 'vid': 'V00", "(Read/Write) License Info: License UDI: ------------------------------------------------- Device# PID SN -------------------------------------------------", "'V03 ', }, }, }, }, } golden_output_6 = {'execute.return_value':", "SN: FGL161010K8 NAME: \"Cisco Services Performance Engine 150 for Cisco", "- Power Supply 1', 'pid': 'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R', 'vid': 'V01L", "self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def test_golden_port(self): self.device =", "Current Software state = ACTIVE Uptime in current state =", "VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2 = { \"main\": {", "'''\\ show processes cpu sorted 5min | inc CPU CPU", "'Cisco Services Performance Engine 150 for Cisco 3900 ISR on", "golden_output_2 = {'execute.return_value': ''' NAME: \"WS-C6504-E\", DESCR: \"Cisco Systems 
Cisco", "'2', 'Virtual Ethernet': '2', 'Gigabit Ethernet': '28', 'FastEthernet': '1' },", "= {'execute.return_value': ''' NAME: \"WS-C6504-E\", DESCR: \"Cisco Systems Cisco 6500", "'compiled_date': 'Wed 26-Jun-13 09:56', 'hostname': 'R5', 'image_id': 'C3750E-UNIVERSALK9-M', 'image_type': 'production", "NAME: \"PVDMII DSP SIMM with four DSPs on Slot 0", "13, \"five_min_cpu\": 15, \"one_min_cpu\": 23, \"five_sec_cpu_interrupts\": 0 } golden_output =", "'uptime': '1 hour, 20 minutes', 'version': '15.0(1)M7', 'version_short': '15.0', },", "test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(KeyError): parsed_output", "image file is \"flash0:/vios-adventerprisek9-m\" Last reload reason: Unknown reason This", "Slot 0', 'pid': 'AIM-VPN/SSL-3', 'sn': 'FOC758693YO', 'vid': 'V01', }, },", "Motherboard serial number : FDO202907UH Model revision number : W0", "\"msfc sub-module of 1\", \"pid\": \"WS-SUP720\", \"sn\": \"SAL11434N9G\", \"vid\": \"\",", "unittest.mock import Mock from pyats.topology import Device from genie.metaparser.util.exceptions import", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware(test_show_platform_hardware_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active)", "10:27:57 EST Mon Dec 9 2019 System image file is", "\"sn\": \"SAL17152QB3\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-E\": { \"descr\": \"WS-F6K-DFC4-E", "150 for Cisco 3900 ISR', 'name': 'Cisco Services Performance Engine", "\"N95_1\", \"os\": \"IOS\", \"version_short\": \"15.6\", \"number_of_intfs\": { \"Gigabit Ethernet\": \"6\"", "golden_output_iosv = {'execute.return_value': '''\\ Redundant System Information : ------------------------------ Available", "\"five_min_cpu\": 2.77, \"runtime\": 3582279, \"pid\": 84, \"five_sec_cpu\": 0.55 }, 3:", "'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD', 'sn': 'ACW102938VS', 'vid': 'V01 ', 
}, },", "'28', 'Ten Gigabit Ethernet': '2', 'Virtual Ethernet': '2', 'Gigabit Ethernet':", "DESCR: \"WS-F6700-CFC Centralized Forwarding Card Rev. 4.1\" PID: WS-F6700-CFC ,", ", VID: V01, SN: SAL11434LYG NAME: \"2\", DESCR: \"WS-X6748-GE-TX CEF720", "} }, 'license_package': { 'data': { 'license_level': 'datak9', 'license_type': 'Permanent',", "Information : ------------------------------- Active Location = slot 0 Current Software", "NAME: \"CISCO3945-CHASSIS\", DESCR: \"CISCO3945-CHASSIS\" PID: CISCO3945-CHASSIS , VID: V05 ,", "This product contains cryptographic features and is subject to United", "to comply with U.S. and local laws, return this product", "self.device = Mock(**self.golden_output_slot_internal) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0', internal=True)", "= version_obj.parse() self.assertEqual(parsed_output, self.parsed_output) class test_dir(unittest.TestCase): dev1 = Device(name='empty') dev_iosv", "} } }, \"WS-C6503-E-FAN 1\": { \"other\": { \"WS-C6503-E-FAN 1\":", "10/100/1000mb Ethernet Rev. 
2.6\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V02\", \"sn\": \"SAL1128UPQ9\",", "NM\" PID: GE-DCARD-ESW , VID: V01 , SN: FOC91864MNN '''}", "\"last_modified_date\": \"Oct 17 2018 18:57:10 +00:00\", \"index\": \"268\", \"size\": \"524288\",", "'name': 'c3845 Motherboard with Gigabit Ethernet on Slot 0', 'pid':", "V01 , SN: ACW102938VS '''} golden_parsed_output_4 = { 'slot': {", "VID: V03 , SN: QCS1604P0BT '''} golden_parsed_output_5 = { 'main':", "def test_golden_output_6(self): self.maxDiff = None self.device = Mock(**self.golden_output_6) obj =", "= {'execute.return_value': '''\\ BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable =", "\"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\", \"chassis\": \"WS-C6503-E\", \"main_mem\": \"983008\", \"processor_type\": \"R7000\", 'sp_by': 'power on',", "Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError): parsed_output = dir_obj.parse() def test_golden_iosv(self): self.maxDiff =", "NAME: \"2\", DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev.", "\"0\": { \"WS-F6700-DFC3CXL\": { \"descr\": \"WS-F6700-DFC3CXL Distributed Forwarding Card 3", "10BaseT/100BaseTX EtherSwitch on Slot 2', 'pid': 'NM-16ESW', 'sn': 'FOC135464KO', 'subslot':", "import Mock from pyats.topology import Device from genie.metaparser.util.exceptions import SchemaEmptyParserError,\\", "{ \"image_ver\": \"Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2,", "48 port 10/100/1000mb Ethernet Rev. 2.6\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V02\",", "Services Performance Engine 150 for Cisco 3900 ISR on Slot", "1': { 'descr': 'C3900 AC Power Supply 1', 'name': 'C3900", "12.2(58r)SE, RELEASE SOFTWARE (fc1)', 'hostname': 'sample_switch', 'uptime': '8 weeks, 3", "'''} golden_parsed_output_7 = { 'main': { 'chassis': { 'CISCO2821': {", "(PowerPC405) processor (revision W0) with 262144K bytes of memory. 
Processor", "+00:00\", \"index\": \"1\", \"size\": \"0\", \"permissions\": \"drw-\" }, \"vios-adventerprisek9-m\": {", "SubSlot 4\", DESCR: \"PVDMII DSP SIMM with four DSPs\" PID:", "obj.parse(slot='0') class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj =", "Te1/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T1\", \"vid\": \"V06 \", } },", "obj.parse(key_word='CPU', sort_time='5min') self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.dev", "VID: V06 , SN: FNS153920YJ NAME: \"Transceiver Te2/16\", DESCR: \"X2", "Fast Ethernet High Speed WAN Interface Card', 'name': 'Two-Port Fast", "C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1) sample_switch", "= {'execute.return_value': ''} golden_parsed_output_iosv = { 'main': { 'chassis': {", "test_golden_output_6(self): self.maxDiff = None self.device = Mock(**self.golden_output_6) obj = ShowInventory(device=self.device)", "self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowPlatform(device=self.dev_c3850) parsed_output", "\"V04\", } } }, } } }, \"4\": { \"lc\":", "'''} golden_parsed_output_2 = { \"main\": { \"chassis\": { \"WS-C6504-E\": {", "self.golden_parsed_output) class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj =", "ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = redundancy_obj.parse() def test_golden_iosv(self): self.maxDiff =", "test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowVersionRp(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "Card 4 EARL sub-module of 3\", \"pid\": \"WS-F6K-DFC4-A\", \"sn\": 
\"SAL171848KL\",", "\"sn\": \"APS1707008Y\", } } }, \"PS 2 PWR-2700-AC/4\": { \"other\":", "License Type: Permanent Next reload license Level: ipservices cisco WS-C3750X-24P", "2013 00:00:00 +00:00 boot 264 drw- 0 Oct 14 2013", "(fc1) best-c3945-IOS3 uptime is 1 hour, 20 minutes System returned", "17 2018 18:57:18 +00:00 e1000_bia.txt 2142715904 bytes total (1989595136 bytes", "obj.parse(status='active', slot='0') class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): def test_golden_serdes(self): self.device = Mock(**self.golden_output_serdes) obj", "\"OSR-7600 Clock FRU 2\" PID: CLK-7600 , VID: , SN:", "(fc1) sample_switch uptime is 8 weeks, 3 days, 10 hours,", "2\" PID: CLK-7600 , VID: , SN: FXS170802GL NAME: \"1\",", "0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D', 'vid': 'V01 ',", "Module on Slot 0\", DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-3", "Ethernet', 'name': 'c3845 Motherboard with Gigabit Ethernet on Slot 0',", "\"2821 chassis\", DESCR: \"2821 chassis\" PID: CISCO2821 , VID: V07", "Mock(**self.empty_output) platform_obj = ShowModule(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def", "'SPC1519005V', 'vid': 'V03 ', }, }, }, 'vid': 'V00 ',", "this product immediately. A summary of U.S. 
laws governing Cisco", "\"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/6\", \"name\": \"Transceiver Te2/6\",", "VID: V01 , SN: FOC65428K9F NAME: \"Wan Interface Card BRI", "Supply 1': { 'descr': 'C3900 AC Power Supply 1', 'name':", "\"last_reload_reason\": \"s/w reset\", 'processor_board_flash': '65536K', \"number_of_intfs\": { \"Gigabit Ethernet/IEEE 802.3\":", "\"last_reload_reason\": \"Unknown reason\", \"hostname\": \"N95_1\", \"os\": \"IOS\", \"version_short\": \"15.6\", \"number_of_intfs\":", "e1000\", \"OSPF-1 Hello\" ], \"five_sec_cpu_interrupts\": 0 } golden_output_1 = {'execute.return_value':", "and is subject to United States and local country laws", "Top Assembly Revision Number : F0 Version ID : V07", ", SN: DTN1504L0E9 NAME: \"TenGigabitEthernet1/1/1\", DESCR: \"SFP-10GBase-SR\" PID: SFP-10G-SR ,", "\"runtime\": 113457, \"pid\": 412, \"five_sec_cpu\": 0.15 } }, \"five_sec_cpu_total\": 4,", "self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowModule(device=self.dev_c3850) parsed_output", "Rev. 5.6\", \"pid\": \"WS-SUP720-3BXL\", \"vid\": \"V05\", \"sn\": \"SAL11434P2C\", \"subslot\": {", "self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(status='active') def test_golden_active(self): self.maxDiff = None self.device", "2017 00:00:00 +00:00\", \"index\": \"267\", \"size\": \"147988420\", \"permissions\": \"-rw-\" }", "= { 'main': { 'chassis': { 'CISCO2821': { 'descr': '2821", "}, }, }, }, }, }, }, } golden_output_8 =", "parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_empty(self):", "self.assertEqual(parsed_output, self.golden_parsed_output_4) def test_golden_output_5(self): self.maxDiff = None self.device = Mock(**self.golden_output_5)", "1 PWR-2700-AC/4\": { \"other\": { \"PS 1 PWR-2700-AC/4\": { \"name\":", "\"WS-F6700-CFC Centralized Forwarding Card Rev. 
4.1\", \"name\": \"WS-F6700-CFC Centralized Forwarding", "Oct 17 2018 18:57:10 +00:00 nvram 269 -rw- 119 Oct", "on Slot 0 SubSlot 4', 'pid': 'PVDM2-64', 'sn': 'FOC63358WSI', 'vid':", "\"X2 Transceiver 10Gbase-SR Te2/4\", \"name\": \"Transceiver Te2/4\", \"pid\": \"X2-10GB-SR\", \"sn\":", "Version 12.2(17r)S4, RELEASE SOFTWARE (fc1) BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version", ", VID: V01 , SN: FOC135464KO NAME: \"Gigabit(1000BaseT) module for", "Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0', 'pid':", "parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_golden_active_opm(self):", "= Mock(**self.empty_output) platform_obj = ShowSwitchDetail(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse()", "obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_empty(self): self.device1 =", ", SN: FOC63358WSI NAME: \"High Density Voice Module - 8FXS/DID", "\"15.6(3)M2\", \"rtr_type\": \"IOSv\", \"chassis_sn\": \"9K66Z7TOKAACDEQA24N7S\", \"chassis\": \"IOSv\", \"image_id\": \"VIOS-ADVENTERPRISEK9-M\", 'compiled_by':", "'FOC2837465E', 'vid': 'V01', 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr':", "with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None", "None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitch(device=self.dev_c3850) parsed_output = platform_obj.parse()", "power-on 14 Virtual Ethernet interfaces 1 FastEthernet interface 28 Gigabit", "\"VS-F6K-PFC4 Policy Feature Card 4 Rev. 
2.0\" PID: VS-F6K-PFC4 ,", "contains cryptographic features and is subject to United States and", "None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_empty(self): self.device1 = Mock(**self.empty_output) obj =", "Distributed Forwarding Card 4 Rev. 1.0\", \"name\": \"WS-F6K-DFC4-A Distributed Forwarding", "Power Supply\" PID: PWR-C2-2929WAC , VID: V02L , SN: LIT03728KKK", "18:57:10 +00:00 nvram 269 -rw- 119 Oct 17 2018 18:57:18", "{ 'descr': 'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS', 'sn': 'FGL161010K8', 'vid':", "EtherSwitch NM\" PID: GE-DCARD-ESW , VID: V01 , SN: FOC91864MNN", "def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) inventory_obj =", "'FOC135464KO', 'subslot': { '0': { 'GE-DCARD-ESW': { 'descr': 'Gigabit(1000BaseT) module", "0 Jan 30 2013 00:00:00 +00:00 boot 264 drw- 0", "= ShowProcessesCpu(device=self.device) parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output)", "inventory_obj = ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = inventory_obj.parse() def test_golden_iosv(self):", "class test_show_platform_power(test_show_platform_power_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformPower(device=self.device)", "Network (VPN) Module on Slot 0\", DESCR: \"Encryption AIM Element\"", "Transceiver 10Gbase-SR Te2/4\" PID: X2-10GB-SR , VID: V06 , SN:", "cycle at 21:57:23 UTC Sat Aug 28 2010 (SP by", "self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitch(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850)", "self.dev_iosv = Mock(**self.golden_output_ios_1) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output,", "Mock(**self.semi_empty_output) 
version_obj = ShowVersion(device=self.dev1) with self.assertRaises(KeyError): parsed_output = version_obj.parse() def", "SN: ONT1702020H NAME: \"Transceiver Te2/3\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/3\"", "'sn': 'FOC91864MNN', 'vid': 'V01 ', }, }, }, 'vid': 'V01", "Slot 2\", DESCR: \"16 Port 10BaseT/100BaseTX EtherSwitch\" PID: NM-16ESW ,", "None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitchDetail(device=self.dev_c3850) parsed_output = platform_obj.parse()", "'sn': 'FTX1234AMWT', 'vid': 'V07 ', }, }, }, 'slot': {", "'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47 EST Mon Dec 9 2019', 'returned_to_rom_by':", "412, \"five_sec_cpu\": 0.15 } }, \"five_sec_cpu_total\": 4, \"five_min_cpu\": 9, \"one_min_cpu\":", "1\", DESCR: \"Six port FXO voice interface daughtercard\" PID: EM-HDA-6FXO", "Disabled Communications = Down Reason: Failure Current Processor Information :", "{ \"WS-F6K-DFC4-E\": { \"descr\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev.", "'2000880K', 'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47 EST Mon Dec 9 2019',", "ipservices cisco WS-C3750X-24P (PowerPC405) processor (revision W0) with 262144K bytes", "parsed_output = dir_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase): dev1 = Device(name='empty')", "port 1000mb SFP Rev. 
1.0\", \"pid\": \"WS-X6824-SFP\", \"vid\": \"V01\", \"sn\":", "module for EtherSwitch NM', 'name': 'Gigabit(1000BaseT) module for EtherSwitch NM", "= ShowPlatformHardwareQfpStatisticsDrop( device=self.device) parsed_output = platform_obj.parse(status='active') self.assertEqual(parsed_output, self.golden_parsed_output_active) class test_show_env(test_show_env_iosxe):", "platform_obj = ShowPlatform(device=self.dev2) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden_c3850(self):", "}, }, }, 'vid': 'V01 ', }, }, }, },", "'vid': 'V03 ', }, }, }, }, } golden_output_6 =", "SN: ONT17020338 NAME: \"Transceiver Te2/2\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/2\"", "'mem_size': {'flash-simulated non-volatile configuration': '512'}, 'next_reload_license_level': 'ipservices', 'number_of_intfs': {'Gigabit Ethernet':", "email to <EMAIL>. License Level: ipservices License Type: Permanent Next", "\"other\": { \"WS-C6503-E-FAN 1\": { \"name\": \"WS-C6503-E-FAN 1\", \"descr\": \"Enhanced", "= 0 Last switchover reason = unsupported Hardware Mode =", "'processor_board_flash': '65536K', \"number_of_intfs\": { \"Gigabit Ethernet/IEEE 802.3\": \"50\", 'Virtual Ethernet/IEEE", "'chassis': 'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Fri 05-Aug-11 00:32',", "NAME: \"WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4\", DESCR:", "'security': { 'license_level': 'securityk9', 'license_type': 'Permanent', 'next_reload_license_level': 'securityk9', }, 'uc':", "= ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3) def test_golden_output_4(self): self.maxDiff", "BOOTLDR variable = Configuration register is 0x2012 Standby not ready", "\"sn\": \"SAL17152EG9\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-A\": { \"descr\": \"WS-F6K-DFC4-A", "\"Enhanced 3-slot Fan Tray 1\", \"pid\": \"WS-C6503-E-FAN\", \"vid\": \"V02\", 
\"sn\":", "16 hours, 42 minutes System returned to ROM by reload", "Policy Feature Card 4 Rev. 2.0\" PID: VS-F6K-PFC4 , VID:", "test_show_platform_hardware_serdes_statistics_internal_iosxe,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\ ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\\ TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\\", "30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_cat6k", "\"V05 \", } }, \"6\": { \"X2-10GB-SR\": { \"descr\": \"X2", "'pid': 'SFP-10G-SR', 'sn': 'SPC1519005V', 'vid': 'V03 ', }, }, },", "Supply', 'name': 'Switch 1 - Power Supply 1', 'pid': 'PWR-C2-2929WAC',", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3) def test_golden_output_4(self): self.maxDiff = None self.device", "Voice Module - 8FXS/DID on Slot 1\", DESCR: \"High Density", "SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2017 by Cisco", "= {'execute.return_value': ''' show version Cisco Internetwork Operating System Software", "{ \"descr\": \"WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8\", \"name\":", "= { \"version\": { \"last_reload_reason\": \"Unknown reason\", \"hostname\": \"N95_1\", \"os\":", "Dec 9 2019', 'uptime': '1 hour, 20 minutes', 'version': '15.0(1)M7',", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6) def test_golden_output_7(self): self.maxDiff = None self.device", "SuperLAT software (copyright 1990 by Meridian Technology Corp). X.25 software,", "of Flash internal SIMM (Sector size 512K). 
Configuration register is", "test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics(", "VID: V02L , SN: LIT03728KKK NAME: \"Switch 1 - FlexStackPlus", "'CISCO3825': { 'descr': '3825 chassis', 'name': '3825 chassis', 'pid': 'CISCO3825',", "PID: SM-ES2-16-P , VID: , SN: FOC09876NP3 '''} golden_parsed_output_6 =", "\"CLK-7600 2\", \"descr\": \"OSR-7600 Clock FRU 2\", \"pid\": \"CLK-7600\", \"vid\":", "status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_golden_standby(self): self.device =", "day, 16 hours, 42 minutes Image Version = Cisco IOS", "5Sec 1Min 5Min TTY Process 368 362874 3321960 109 1.03%", "'1', 'pid': 'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3', 'subslot': { '1': { 'C1010X-STACK':", "self.golden_parsed_output) class test_show_platform_hardware(test_show_platform_hardware_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj =", "'WS-C1010XR-48FPS-I', 'name': '1', 'pid': 'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3', 'subslot': { '1':", "= ShowProcessesCpuPlatform(device=self.device) parsed_output = cpu_platform_obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output)", "}, }, }, } golden_output_7 = {'execute.return_value': ''' NAME: \"2821", "chassis', 'name': '2821 chassis', 'pid': 'CISCO2821', 'sn': 'FTX1234AMWT', 'vid': 'V07", "Virtual Ethernet/IEEE 802.3 interface 50 Gigabit Ethernet/IEEE 802.3 interfaces 1917K", "self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardware(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "V01 , SN: FOC65428K9F NAME: \"Wan Interface Card BRI U", "{'execute.return_value': ''} golden_parsed_output_iosv = { \"red_sys_info\": { \"last_switchover_reason\": \"unsupported\", \"maint_mode\":", "NAME: \"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2\",", 
"of non-volatile configuration memory. 8192K bytes of packet buffer memory.", "ShowPlatformSoftwareStatusControl(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff =", "\"Disabled\", \"switchovers_system_experienced\": \"0\", \"available_system_uptime\": \"0 minutes\", \"communications\": \"Down\", \"hw_mode\": \"Simplex\",", "VID: V03, SN: APS17070093 '''} golden_parsed_output_3 = { \"main\": {", "PWR-1400-AC\", DESCR: \"AC power supply, 1400 watt 1\" PID: PWR-1400-AC", "Last reset from power-on 2 Virtual Ethernet interfaces 1 FastEthernet", "DSP SIMM with four DSPs on Slot 0 SubSlot 4',", "PID: CISCO2821 , VID: V07 , SN: FTX1234AMWT NAME: \"VWIC2-2MFT-T1/E1", "\"V04\", \"sn\": \"SAL14017TWF\", \"subslot\": { \"0\": { \"WS-F6700-CFC\": { \"descr\":", "chassis\", DESCR: \"3825 chassis\" PID: CISCO3825 , VID: V05 ,", "'sn': 'FOC98675U0D', 'vid': 'V01 ', }, }, '1': { 'VWIC2-2MFT-T1/E1':", "119 Oct 17 2018 18:57:18 +00:00 e1000_bia.txt 2142715904 bytes total", "slot='0', iotype='ipm') self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_golden_active_opm(self): self.maxDiff = None self.device", "self.device = Mock(**self.golden_output_7) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,", "\"name\": \"WS-C6503-E\", \"descr\": \"Cisco Systems Catalyst 6500 3-slot Chassis System\",", "\"vid\": \"V06 \", } }, \"2\": { \"X2-10GB-SR\": { \"descr\":", "PID: X2-10GB-SR , VID: V06 , SN: ONT1702020H NAME: \"Transceiver", "'EM-HDA-6FXO': { 'descr': 'Six port FXO voice interface daughtercard', 'name':", "class test_show_version_rp(test_show_version_rp_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowVersionRp(device=self.device)", "'vid': 'V01L ', }, }, '2/1/1': { 'SFP-10G-LR': { 'descr':", "'descr': 'IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0', 'name':", 
"class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device)", "\"Transceiver Te2/16\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170201TT\", \"vid\": \"V06 \", }", "at 05:06:40 GMT Tue Sep 10 2019 System image file", "4\", DESCR: \"PVDMII DSP SIMM with four DSPs\" PID: PVDM2-64", "from genie.metaparser.util.exceptions import SchemaEmptyParserError,\\ SchemaMissingKeyError from genie.libs.parser.ios.show_platform import ShowVersion,\\ Dir,\\", "genie.libs.parser.ios.show_platform import ShowVersion,\\ Dir,\\ ShowRedundancy,\\ ShowInventory,\\ ShowBootvar, \\ ShowProcessesCpuSorted,\\ ShowProcessesCpu,\\", "'sn': 'FOC2837465E', 'vid': 'V01', 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': {", "from power-on 14 Virtual Ethernet interfaces 1 FastEthernet interface 28", "}, \"dir\": \"flash0:/\" } } golden_output_iosv = {'execute.return_value': '''\\ Directory", "}, } golden_output_8 = {'execute.return_value': ''' NAME: \"3825 chassis\", DESCR:", "\"5\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te1/5\", \"name\":", "version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def test_golden_ios_2(self): self.maxDiff = None self.dev_iosv =", "Hello '''} def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowProcessesCpuSorted(device=self.dev)", "= platform_obj.parse( status='active', slot='0', iotype='ipm') def test_golden_active_ipm(self): self.maxDiff = None", "<EMAIL>. 
cisco WS-C6503-E (R7000) processor (revision 1.4) with 983008K/65536K bytes", "PID: X2-10GB-SR , VID: V06 , SN: ONT170202T1 NAME: \"Transceiver", "{ \"WS-X6816-10GE\": { \"name\": \"2\", \"descr\": \"WS-X6816-10GE CEF720 16 port", "{'execute.return_value': '''\\ Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2,", "bytes total (1989595136 bytes free) '''} def test_empty(self): self.dev1 =", "= Mock(**self.empty_output) inventory_obj = ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = inventory_obj.parse()", "def test_golden_output_2(self): self.maxDiff = None self.device = Mock(**self.golden_output_2) obj =", "\"runtime\": 3582279, \"pid\": 84, \"five_sec_cpu\": 0.55 }, 3: { \"invoked\":", "immediately. A summary of U.S. laws governing Cisco cryptographic products", "datak9 Configuration register is 0x2102 '''} parsed_output = { 'version':", "class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowProcessesCpuHistory(device=self.device)", "'ipservices', 'license_type': 'Permanent', 'main_mem': '262144', 'mem_size': {'flash-simulated non-volatile configuration': '512'},", "\"2\", DESCR: \"WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0\" PID:", "\"WS-X6816-10GE CEF720 16 port 10GE Rev. 
2.0\", \"pid\": \"WS-X6816-10GE\", \"vid\":", "def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowModule(device=self.dev1) with self.assertRaises(SchemaEmptyParserError):", "\"five_sec_cpu_total\": 4, \"five_min_cpu\": 9, \"one_min_cpu\": 4, \"nonzero_cpu_processes\": [ \"PIM Process\",", "BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)", "Mock(**self.golden_output_iosv) inventory_obj = ShowInventory(device=self.dev_iosv) parsed_output = inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def", "= platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') def test_golden(self): self.maxDiff = None", "FXO voice interface daughtercard', 'name': 'Six port FXO voice interface", "'descr': 'c3845 Motherboard with Gigabit Ethernet', 'name': 'c3845 Motherboard with", "\"PS 2 PWR-2700-AC/4\": { \"name\": \"PS 2 PWR-2700-AC/4\", \"descr\": \"2700W", "Slot 0\", DESCR: \"Cisco Services Performance Engine 150 for Cisco", "\"WS-X6748-GE-TX\": { \"name\": \"2\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb", "'2': { 'C3KX-PWR-007CBA': { 'descr': 'BCA Power Supply', 'name': 'Switch", "NAME: \"Six port FXO voice interface daughtercard on Slot 1", "def test_golden_output_3(self): self.maxDiff = None self.device = Mock(**self.golden_output_3) obj =", "ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_opm)", "\"lc\": { \"WS-X6824-SFP\": { \"name\": \"3\", \"descr\": \"WS-X6824-SFP CEF720 24", "{ '0': { 'GE-DCARD-ESW': { 'descr': 'Gigabit(1000BaseT) module for EtherSwitch", "self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(port='0/0/0') class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def test_golden_active_opm(self): self.device =", 
"Ethernet/IEEE 802.3 interface 50 Gigabit Ethernet/IEEE 802.3 interfaces 1917K bytes", "'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS', 'sn': 'FGL161010K8', 'vid': 'V05 ',", "113457, \"pid\": 412, \"five_sec_cpu\": 0.15 } }, \"five_sec_cpu_total\": 4, \"five_min_cpu\":", "{\"non-volatile configuration\": \"1917\", \"packet buffer\": \"8192\"}, \"curr_config_register\": \"0x2102\", } }", "Configuration register is 0x2012 Standby not ready to show bootvar", "}, }, '1': { 'lc': { 'NM-1T3/E3=': { 'descr': 'Clear/Subrate", "DESCR: \"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0\" PID: WS-F6K-DFC4-A", "DESCR: \"Gigabit(1000BaseT) module for EtherSwitch NM\" PID: GE-DCARD-ESW , VID:", "NAME: \"GigabitEthernet1/0/49\", DESCR: \"1000BaseSX SFP\" PID: GLC-SX-MMD , VID: V01", "Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\\ ROM:", "{ 'CISCO3945-CHASSIS': { 'descr': 'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS', 'sn':", "\"SAL11434LYG\", \"vid\": \"V01\", }, } }, } } }, \"2\":", "Module - 8FXS/DID on Slot 1', 'pid': 'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8',", "version_obj = ShowVersion(device=self.dev1) with self.assertRaises(AttributeError): parsered_output = version_obj.parse() def test_semi_empty(self):", "= platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv)", "Mock(**self.golden_output_active_ipm) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff =", "(s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright", "\"X2 Transceiver 10Gbase-SR Te2/16\" PID: X2-10GB-SR , VID: V06 ,", "= inventory_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv)", "PWR-2700-AC/4\": { 
\"name\": \"PS 1 PWR-2700-AC/4\", \"descr\": \"2700W AC power", "Image text-base: 0x40101040, data-base: 0x42D98000 ROM: System Bootstrap, Version 12.2(17r)S4,", "{ \"descr\": \"X2 Transceiver 10Gbase-SR Te2/4\", \"name\": \"Transceiver Te2/4\", \"pid\":", "'0xF', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 26-Jun-13 09:56', 'hostname': 'R5', 'image_id':", "since cat6k_tb1 switched to active is 10 weeks, 5 days,", "Gigabit Ethernet on Slot 0\", DESCR: \"c3845 Motherboard with Gigabit", "'pid': 'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9', 'vid': 'V01D ', }, }, '1/1/1':", "Mode = Disabled Communications = Down Reason: Failure Current Processor", "'vid': '', }, }, }, }, } golden_output_7 = {'execute.return_value':", "IOSv N95_1 uptime is 1 day, 16 hours, 42 minutes", "self.assertEqual(parsed_output, self.golden_parsed_output_port) def test_golden_slot(self): self.device = Mock(**self.golden_output_slot) obj = ShowPlatformHardwarePlim(device=self.device)", "}, }, }, '16': { 'lc': { 'NM-16ESW': { 'descr':", "VID: V02, SN: SAL171846RF NAME: \"Transceiver Te2/1\", DESCR: \"X2 Transceiver", ", SN: FOC09876NP3 '''} golden_parsed_output_6 = { 'slot': { '1':", "utilization for five seconds: 4%/0%; one minute: 4%; five minutes:", "platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='opm')", "Te2/5\", \"name\": \"Transceiver Te2/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"AGA1515XZE2\", \"vid\": \"V05", "image file is \"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\" Last reload type: Normal Reload Last", "be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please", "3.1\", \"name\": \"msfc sub-module of 1\", \"pid\": \"WS-SUP720\", \"sn\": \"SAL11434N9G\",", "10Gbase-SR Te2/5\", \"name\": \"Transceiver Te2/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"AGA1515XZE2\", \"vid\":", "} }, \"16\": { \"X2-10GB-SR\": { \"descr\": \"X2 
Transceiver 10Gbase-SR", "0 SubSlot 1', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E', 'vid': 'V01 ',", "WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_1 =", "= ShowSwitchDetail(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff", "test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) inventory_obj = ShowInventory(device=self.dev_iosv)", "self.assertRaises(SchemaEmptyParserError): parsed_output = inventory_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv", "1000mb SFP Rev. 1.0\" PID: WS-X6824-SFP , VID: V01, SN:", "= None self.assertEqual(parsed_output, self.golden_parsed_output_port) def test_golden_slot(self): self.device = Mock(**self.golden_output_slot) obj", "\"V01\", \"sn\": \"ABC0830J127\", } } }, }, } golden_output_3 =", "'2821 chassis', 'name': '2821 chassis', 'pid': 'CISCO2821', 'sn': 'FTX1234AMWT', 'vid':", "ShowPlatformHardwareSerdesInternal(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def test_empty(self):", "= Mock(**self.golden_output_standby_offline) obj = ShowVersionRp(device=self.device) self.maxDiff = None with self.assertRaises(SchemaEmptyParserError):", "ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = inventory_obj.parse() def test_golden_iosv(self): self.maxDiff =", "2 Ten Gigabit Ethernet interfaces The password-recovery mechanism is enabled.", "class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpStatisticsDrop(", "ShowBootvar(device=self.dev) with 
self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff =", "ShowPlatformSoftwareStatusControl,\\ ShowPlatformSoftwareSlotActiveMonitorMem,\\ ShowPlatformHardware,\\ ShowPlatformHardwarePlim,\\ ShowPlatformHardwareQfpBqsOpmMapping,\\ ShowPlatformHardwareQfpBqsIpmMapping,\\ ShowPlatformHardwareSerdes,\\ ShowPlatformHardwareSerdesInternal,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\", "'12.2(55)SE8', 'version_short': '12.2' } } golden_output_ios = {'execute.return_value': '''\\ Cisco", "2.6\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V02\", \"sn\": \"SAL1128UPQ9\", \"subslot\": { \"0\":", "Module:'c3900' ----------------------------------------------------------------- Technology Technology-package Technology-package Current Type Next reboot ------------------------------------------------------------------", "parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') def test_golden(self): self.maxDiff =", "System Software IOS (tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE", "\"name\": \"msfc sub-module of 1\", \"pid\": \"VS-F6K-MSFC5\", \"sn\": \"SAL17142D06\", \"vid\":", "RELEASE SOFTWARE (fc1)', 'hostname': 'sample_switch', 'uptime': '8 weeks, 3 days,", "\"nonzero_cpu_processes\": [ \"PIM Process\", \"IOSv e1000\", \"OSPF-1 Hello\" ], \"five_sec_cpu_interrupts\":", "'processor_type': 'PowerPC405', 'returned_to_rom_by': 'power-on', 'rom': 'Bootstrap program is C3750E boot", "VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675W3E NAME: \"Virtual Private", "VID: V06 , SN: ONT17020338 NAME: \"Transceiver Te2/2\", DESCR: \"X2", "'pid': 'HWIC-2FE', 'sn': 'FOC16062824', 'vid': 'V02 ', }, }, },", "as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\ TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\\ 
TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as", "48 port 10/100/1000mb Ethernet Rev. 3.4\" PID: WS-X6748-GE-TX , VID:", "}, 'uc': { 'license_level': 'None', 'license_type': 'None', 'next_reload_license_level': 'None', },", "Mock(**self.empty_output) platform_obj = ShowBootvar(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def", "reset from power-on 2 Virtual Ethernet interfaces 1 FastEthernet interface", "IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2)\", \"uptime_in_curr_state\": \"1", "2097152K bytes of ATA System CompactFlash 0 (Read/Write) 0K bytes", "{ \"files\": { \"e1000_bia.txt\": { \"last_modified_date\": \"Oct 17 2018 18:57:18", "to ROM by power-on System restarted at 05:06:40 GMT Tue", "self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') def test_golden_active_ipm(self): self.maxDiff", "with Gigabit Ethernet on Slot 0\", DESCR: \"c3845 Motherboard with", "non-volatile configuration memory. Base ethernet MAC Address : 84:3D:C6:FF:F1:B8 Motherboard", "Mock(**self.golden_output_3) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3) def", ", SN: FOC16062824 NAME: \"C3900 AC Power Supply 1\", DESCR:", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6) def test_golden_output_7(self): self.maxDiff = None self.device =", ", VID: V02 , SN: FD5678Z90P NAME: \"Switch 2 -", "\"BCA Power Supply\" PID: C3KX-PWR-007CBA , VID: V01L , SN:", "262144K bytes of memory. 
Processor board ID FDO2028F1WK Last reset", "= platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def test_golden(self): self.device =", ", VID: V05 , SN: FOC16050QP6 NAME: \"Two-Port Fast Ethernet", "{ 'descr': 'WS-C8888X-88', 'name': '1', 'pid': 'WS-C0123X-45T-S', 'sn': 'FDO123R12W', 'subslot':", "2017 00:00:00 +00:00 vios-adventerprisek9-m 268 -rw- 524288 Oct 17 2018", "class test_show_platform_hardware(test_show_platform_hardware_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowPlatformHardware(device=self.device)", "\"2\": { \"lc\": { \"WS-X6816-10GE\": { \"name\": \"2\", \"descr\": \"WS-X6816-10GE", "to active is 10 weeks, 5 days, 5 hours, 15", "Port 10BaseT/100BaseTX EtherSwitch', 'name': '16 Port 10BaseT/100BaseTX EtherSwitch on Slot", "'vid': 'V01D ', }, }, '1/1/1': { 'SFP-10G-SR': { 'descr':", "28 2010', 'returned_to_rom_by': 'power cycle', \"rtr_type\": \"WS-C6503-E\", \"chassis_sn\": \"FXS1821Q2H9\", \"last_reload_reason\":", "supply for CISCO7604 2\" PID: PWR-2700-AC/4 , VID: V03, SN:", "parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def test_empty(self): self.device", "test_golden(self): self.device = Mock(**self.golden_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device) parsed_output = cpu_platform_obj.parse()", "obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class", "{ \"FAN-MOD-4HS 1\": { \"name\": \"FAN-MOD-4HS 1\", \"descr\": \"High Speed", "}, \"4\": { \"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"4\", \"descr\":", "DESCR: \"Cisco Services Performance Engine 150 for Cisco 3900 ISR\"", "\"pid\": \"WS-X6824-SFP\", \"vid\": \"V01\", 
\"sn\": \"SAL17152EG9\", \"subslot\": { \"0\": {", "(C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright", "1\", DESCR: \"High Speed Fan Module for CISCO7604 1\" PID:", "PID: GE-DCARD-ESW , VID: V01 , SN: FOC91864MNN '''} golden_parsed_output_9", "0, \"one_min_cpu\": 0.87, \"process\": \"IOSv e1000\", \"five_min_cpu\": 2.77, \"runtime\": 3582279,", "C3KX-PWR-350WAC , VID: V01D , SN: DTN1504L0E9 NAME: \"TenGigabitEthernet1/1/1\", DESCR:", "{ \"CLK-7600 2\": { \"name\": \"CLK-7600 2\", \"descr\": \"OSR-7600 Clock", "United States and local country laws governing import, export, transfer", "6 Gigabit Ethernet interfaces DRAM configuration is 72 bits wide", "} golden_output_5 = {'execute.return_value': ''' best-c3945-IOS3#show inventory NAME: \"CISCO3945-CHASSIS\", DESCR:", "2019 System image file is \"flash:c3750e-universalk9-mz.152-2.E8.bin\" Last reload reason: Reload", "'sn': 'FOC16050QP6', 'subslot': { '3': { 'HWIC-2FE': { 'descr': 'Two-Port", "AGA1515XZE2 NAME: \"Transceiver Te2/6\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/6\" PID:", "\"WS-F6K-PFC3BXL\", \"sn\": \"SAL11434LYG\", \"vid\": \"V01\", }, } }, } }", "\", } }, \"2\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver", "= Mock(**self.golden_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device) parsed_output = cpu_platform_obj.parse() self.maxDiff =", "self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff = None self.dev", "def test_golden_active_ipm(self): self.maxDiff = None self.device = Mock(**self.golden_output_active_ipm) platform_obj =", "720 Rev. 
5.6\", \"pid\": \"WS-SUP720-3BXL\", \"vid\": \"V05\", \"sn\": \"SAL11434P2C\", \"subslot\":", "----- ---------- ---------- * 1 30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M Configuration", "CISCO7604 2\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS17070093\", } }", "CLK-7600 , VID: , SN: FXS181101V4 NAME: \"CLK-7600 2\", DESCR:", "\"descr\": \"2700W AC power supply for CISCO7604 1\", \"pid\": \"PWR-2700-AC/4\",", "self.device = Mock(**self.golden_output_standby_offline) obj = ShowVersionRp(device=self.device) self.maxDiff = None with", "} } }, \"CLK-7600 2\": { \"other\": { \"CLK-7600 2\":", "'processor_type': 'PowerPC405', 'rtr_type': 'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S', 'number_of_intfs': { 'Virtual Ethernet':", "\"sn\": \"AGA1515XZE2\", \"vid\": \"V05 \", } }, \"6\": { \"X2-10GB-SR\":", "'descr': 'BCA Power Supply', 'name': 'Switch 2 - Power Supply", "test_golden_output_2(self): self.maxDiff = None self.device = Mock(**self.golden_output_2) obj = ShowInventory(device=self.device)", "WAN on Slot 1', 'pid': 'NM-1T3/E3=', 'sn': 'FOC28476ADM', 'vid': 'V01", "sub-module of 1\", \"pid\": \"WS-SUP720\", \"sn\": \"SAL11434N9G\", \"vid\": \"\", },", "memory. 
Base ethernet MAC Address : 84:3D:C6:FF:F1:B8 Motherboard assembly number", "Forwarding Card 4 EARL sub-module of 3\", \"pid\": \"WS-F6K-DFC4-A\", \"sn\":", "parsed_output = obj.parse(port='0/0/0') class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def test_golden_active_opm(self): self.device = Mock(**self.golden_output_active_opm)", "Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE SOFTWARE (fc2) Technical", "13%/0%; one minute: 23%; five minutes: 15% '''} golden_parsed_output_1 =", "= ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse(key_word='CPU', sort_time='5min') self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self):", "0', 'pid': 'GE-DCARD-ESW', 'sn': 'FOC91864MNN', 'vid': 'V01 ', }, },", "Bootstrap program is C3750E boot loader BOOTLDR: C3750E Boot Loader", "+00:00 config 267 -rw- 147988420 Mar 29 2017 00:00:00 +00:00", "bits wide with parity disabled. 256K bytes of non-volatile configuration", "\"V03\", }, }, \"4\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver", "\"image_ver\": \"Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE", "'05:06:40 GMT Tue Sep 10 2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on',", "- Power Supply 1', 'pid': 'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9', 'vid': 'V01D", "contact us by sending email to <EMAIL>. Cisco CISCO3945-CHASSIS (revision", "\"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", } } }, \"CLK-7600", "= ShowPlatformHardware(device=self.device) parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active)", "\"sn\": \"SAL11434LYG\", \"vid\": \"V01\", }, } }, } } },", "\"ABC0830J127\", } } }, }, } golden_output_3 = {'execute.return_value': '''", "to <EMAIL>. 
Cisco CISCO3945-CHASSIS (revision 1.1) with C3900-SPE150/K9 with 2027520K/69632K", "\"vid\": \"V02\", \"sn\": \"DCH183500KW\", } } }, \"PS 1 PWR-1400-AC\":", "\"V06 \", } }, }, } } }, \"3\": {", "minutes Switchovers system experienced = 0 Standby failures = 0", "on Slot 1\", DESCR: \"Clear/Subrate T3/E3 WAN\" PID: NM-1T3/E3= ,", "Device from genie.metaparser.util.exceptions import SchemaEmptyParserError,\\ SchemaMissingKeyError from genie.libs.parser.ios.show_platform import ShowVersion,\\", "DESCR: \"BCA Power Supply\" PID: C3KX-PWR-007CBA , VID: V01L ,", "EtherSwitch on Slot 2\", DESCR: \"16 Port 10BaseT/100BaseTX EtherSwitch\" PID:", "'number_of_intfs': { 'FastEthernet': '2', 'Gigabit Ethernet': '3', }, 'os': 'IOS',", "file is \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\" This product contains cryptographic features and is", "self.dev = Mock(**self.empty_output) obj = ShowProcessesCpuSorted(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output =", "10 hours, 27 minutes', 'returned_to_rom_by': 'power-on', 'system_restarted_at': '05:06:40 GMT Tue", "parsed_output = inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_output_2(self): self.maxDiff = None", "\"sn\": \"SAL14017TWF\", \"subslot\": { \"0\": { \"WS-F6700-CFC\": { \"descr\": \"WS-F6700-CFC", "SN: FTX1234AMWT NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk -", "'name': 'High Density Voice Module - 8FXS/DID on Slot 1',", "size 512K). 
Configuration register is 0x2102 '''} golden_output_ios_1 = {'execute.return_value':", "Cisco 6500 4-slot Chassis System\", \"pid\": \"WS-C6504-E\", \"vid\": \"V01\", \"sn\":", "'2': { 'rp': { 'WS-C3210X-48T-S': { 'descr': 'WS-C3210X-48', 'name': '2',", "0 SubSlot 1\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk", "platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(status='active') def", "15% '''} golden_parsed_output_1 = { \"sort\": { 1: { \"invoked\":", "{ \"PS 1 PWR-1400-AC\": { \"name\": \"PS 1 PWR-1400-AC\", \"descr\":", ", VID: V03, SN: SAL17163901 NAME: \"Transceiver Te1/4\", DESCR: \"X2", "Mock(**self.empty_output) obj = ShowPlatformHardwarePlim(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(port='0/0/0') class", "\"PWR-1400-AC\", \"vid\": \"V01\", \"sn\": \"ABC0830J127\", } } }, }, }", "} }, }, } golden_output_3 = {'execute.return_value': ''' # show", "\"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2\", \"name\": \"WS-F6K-DFC4-E Distributed", "\"AC power supply, 1400 watt 1\", \"pid\": \"PWR-1400-AC\", \"vid\": \"V01\",", "\"PS 1 PWR-1400-AC\": { \"other\": { \"PS 1 PWR-1400-AC\": {", "bytes of memory. 
Processor board ID FDO2028F1WK Last reset from", "= {'execute.return_value': ''' # show inventory NAME: \"WS-C6503-E\", DESCR: \"Cisco", ", VID: V01L , SN: LTP13579L3R NAME: \"TenGigabitEthernet2/1/1\", DESCR: \"SFP-10GBase-LR\"", "'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK', 'curr_config_register': '0xF', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 26-Jun-13", "Delivery of Cisco cryptographic products does not imply third-party authority", "Slot 0 SubSlot 0\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex", "reason\", \"hostname\": \"N95_1\", \"os\": \"IOS\", \"version_short\": \"15.6\", \"number_of_intfs\": { \"Gigabit", "test_show_switch(test_show_switch_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitch(device=self.dev1) with", "self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1) obj", "golden_parsed_output_3 = { \"main\": { \"chassis\": { \"WS-C6503-E\": { \"name\":", "'C3900-SPE150/K9', 'sn': 'FOC16050QP6' } } }, 'license_package': { 'data': {", "Hardware Board Revision Number : 0x05 Switch Ports Model SW", "\"usecs\": 2442, \"tty\": 0, \"one_min_cpu\": 0.87, \"process\": \"IOSv e1000\", \"five_min_cpu\":", "{ \"red_sys_info\": { \"last_switchover_reason\": \"unsupported\", \"maint_mode\": \"Disabled\", \"switchovers_system_experienced\": \"0\", \"available_system_uptime\":", "'1' }, 'os': 'IOS', 'platform': 'C3750E', 'processor_type': 'PowerPC405', 'returned_to_rom_by': 'power-on',", "'524288', 'processor_type': 'PowerPC405', 'rtr_type': 'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S', 'number_of_intfs': { 'Virtual", ", VID: V01 , SN: FOC63358WSI NAME: \"High Density Voice", "'Ten Gigabit Ethernet': '2', 'Virtual Ethernet': '2', 'Gigabit Ethernet': '28',", "V02 , SN: FOC16062824 NAME: \"C3900 AC Power Supply 1\",", "}, }, } } }, \"3\": { \"lc\": { \"WS-X6824-SFP\":", "self.dev = Mock(**self.golden_output) obj = 
ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output,", "'vid': 'V01', }, }, '4': { 'PVDM2-64': { 'descr': 'PVDMII", "\"V03\", \"sn\": \"FXS1821Q2H9\", } } }, \"slot\": { \"CLK-7600 1\":", "2T 10GE w/ CTS Rev. 1.5\", \"pid\": \"VS-SUP2T-10G\", \"vid\": \"V05\",", "\"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/16\", \"name\": \"Transceiver Te2/16\",", "Module', 'name': 'Switch 1 - FlexStackPlus Module', 'pid': 'C1010X-STACK', 'sn':", "18:57:18 +00:00 e1000_bia.txt 2142715904 bytes total (1989595136 bytes free) '''}", "WIC-1B-U-V2 , VID: V01, SN: 10293847 NAME: \"PVDMII DSP SIMM", "\"name\": \"CLK-7600 1\", \"descr\": \"OSR-7600 Clock FRU 1\", \"pid\": \"CLK-7600\",", "{ \"descr\": \"X2 Transceiver 10Gbase-SR Te1/5\", \"name\": \"Transceiver Te1/5\", \"pid\":", "'kellythw', 'compiled_date': 'Thu 23-Nov-06 06:26', \"image_type\": \"production image\", \"rom\": \"System", "test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): def test_golden_serdes(self): self.device = Mock(**self.golden_output_serdes) obj = ShowPlatformHardwareSerdes(device=self.device) parsed_output", ", VID: V01 , SN: FOC28476ADM NAME: \"16 Port 10BaseT/100BaseTX", "\"last_modified_date\": \"Oct 17 2018 18:57:18 +00:00\", \"index\": \"269\", \"size\": \"119\",", "{'execute.return_value': ''} golden_parsed_output = { \"five_sec_cpu_total\": 13, \"five_min_cpu\": 15, \"one_min_cpu\":", "15.2(2)E8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_1 = {", "= { \"dir\": { \"flash0:/\": { \"files\": { \"e1000_bia.txt\": {", "1 (Read/Write) 0K bytes of ATA CompactFlash 2 (Read/Write) 10080K", "self.maxDiff = None with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(rp='standby', status='running') def", "'name': 'Switch 2 - Power Supply 1', 'pid': 'C3KX-PWR-007CBA', 'sn':", "on Slot 0', 'pid': 'AIM-VPN/SSL-3', 'sn': 'FOC758693YO', 'vid': 
'V01', },", "0.48% 0 PIM Process 84 3582279 1466728 2442 0.55% 0.87%", "PID: NM-16ESW , VID: V01 , SN: FOC135464KO NAME: \"Gigabit(1000BaseT)", "self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_semi_empty(self): self.dev2 = Mock(**self.semi_empty_output) platform_obj", "DCH183500KW NAME: \"PS 1 PWR-1400-AC\", DESCR: \"AC power supply, 1400", "(fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2013 by Cisco Systems,", "'name': 'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR', 'sn': 'ONT182746GZ', 'vid': 'V02 ', },", "{ \"last_reload_reason\": \"Unknown reason\", \"hostname\": \"N95_1\", \"os\": \"IOS\", \"version_short\": \"15.6\",", "+00:00\", \"index\": \"269\", \"size\": \"119\", \"permissions\": \"-rw-\" }, \"config\": {", "FOC16050QP6 NAME: \"Two-Port Fast Ethernet High Speed WAN Interface Card", "109, \"tty\": 0, \"one_min_cpu\": 0.54, \"process\": \"PIM Process\", \"five_min_cpu\": 0.48,", "program is C3750E boot loader', 'rtr_type': 'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at':", "10BaseT/100BaseTX EtherSwitch', 'name': '16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2',", "'curr_config_register': '0xF', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 26-Jun-13 09:56', 'hostname': 'R5',", "= Mock(**self.golden_output) obj = ShowEnvironment(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output)", "SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2018 by Cisco", "'PVDMII DSP SIMM with four DSPs on Slot 0 SubSlot", "self.device = Mock(**self.golden_output) platform_obj = ShowPlatformPower(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,", "bytes of ATA System CompactFlash 0 (Read/Write) 0K bytes of", "Te2/4\" PID: X2-10GB-SR , VID: V06 , SN: ONT170202T5 NAME:", "self.assertEqual(parsed_output, self.golden_parsed_output) class 
test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj", "---------- * 1 30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration register is", "2\": { \"name\": \"CLK-7600 2\", \"descr\": \"OSR-7600 Clock FRU 2\",", "port 1000mb SFP Rev. 1.0\" PID: WS-X6824-SFP , VID: V01,", "Module', 'pid': 'C1010X-STACK', 'sn': 'FD232323XXZ', 'vid': 'V02 ', }, 'PWR-C2-2929WAC':", "'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S', 'number_of_intfs': { 'Virtual Ethernet': '14', 'FastEthernet': '1',", "ROM by reload System image file is \"flash0:/vios-adventerprisek9-m\" Last reload", "Fan Module for CISCO7604 1\", \"pid\": \"FAN-MOD-4HS\", \"vid\": \"V01\", \"sn\":", "on Slot 1', 'pid': 'NM-1T3/E3=', 'sn': 'FOC28476ADM', 'vid': 'V01 ',", "\"pid\": \"X2-10GB-SR\", \"sn\": \"FNS153920YJ\", \"vid\": \"V06 \", } }, \"16\":", "Supervisor Engine 720 Rev. 5.6\", \"pid\": \"WS-SUP720-3BXL\", \"vid\": \"V05\", \"sn\":", "\"descr\": \"AC power supply, 1400 watt 1\", \"pid\": \"PWR-1400-AC\", \"vid\":", "port FXO voice interface daughtercard on Slot 1 SubSlot 1\",", "Card Rev. 
4.1\" PID: WS-F6700-CFC , VID: V06, SN: SAL13516QS8", "Mock(**self.golden_output_iosv) redundancy_obj = ShowRedundancy(device=self.dev_iosv) parsed_output = redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class", "= ShowPlatformPower(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def", "class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def test_golden(self): self.device = Mock(**self.golden_output_serdes_internal) obj = ShowPlatformHardwareSerdesInternal(device=self.device)", "self.maxDiff = None self.device = Mock(**self.golden_output_1) obj = ShowProcessesCpu(device=self.device) parsed_output", "cisco WS-C3750X-24S (PowerPC405) processor (revision A0) with 524288K bytes of", "None self.dev_asr1k = Mock(**self.golden_output_asr1k) platform_obj = ShowPlatform(device=self.dev_asr1k) parsed_output = platform_obj.parse()", "\"one_min_cpu\": 0.87, \"process\": \"IOSv e1000\", \"five_min_cpu\": 2.77, \"runtime\": 3582279, \"pid\":", "Ethernet\": \"6\" }, \"version\": \"15.6(3)M2\", \"rtr_type\": \"IOSv\", \"chassis_sn\": \"9K66Z7TOKAACDEQA24N7S\", \"chassis\":", "obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1) class test_show_processes_cpu(test_show_processes_cpu_iosxe):", "EST Mon Dec 9 2019 System image file is \"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\"", "board ID 9K66Z7TOKAACDEQA24N7S 6 Gigabit Ethernet interfaces DRAM configuration is", "self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwarePlim(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "\"pid\": \"WS-F6700-DFC3CXL\", \"sn\": \"SAL1214LAG5\", \"vid\": \"V01\", } } }, }", "buffer\": \"8192\"}, 
\"curr_config_register\": \"0x2102\", } } golden_output_ios_cat6k = {'execute.return_value': '''", "platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm')", "}, \"slot\": { \"CLK-7600 1\": { \"other\": { \"CLK-7600 1\":", "test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowPlatform(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "PID: PVDM2-64 , VID: V01 , SN: FOC63358WSI NAME: \"High", "(revision 1.1) with C3900-SPE150/K9 with 2027520K/69632K bytes of memory. Processor", "self.device = Mock(**self.empty_output) platform_obj = ShowPlatformPower(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "is 72 bits wide with parity enabled. 255K bytes of", "ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm')", "sending email to <EMAIL>. License Level: ipservices License Type: Permanent", "Rev 1.2, 512KB L2 Cache Last reset from s/w reset", "\"size\": \"119\", \"permissions\": \"-rw-\" }, \"config\": { \"last_modified_date\": \"Oct 14", "returned to ROM by reload System image file is \"flash0:/vios-adventerprisek9-m\"", "\"3\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/3\", \"name\":", "0.87% 2.77% 0 IOSv e1000 412 113457 116196 976 0.15%", "flash:/ '''} golden_parsed_output_iosv = { \"dir\": { \"flash0:/\": { \"files\":", "= Device(name='c3850') empty_output = {'execute.return_value': ''} golden_parsed_output = { \"five_sec_cpu_total\":", "wide with parity disabled. 256K bytes of non-volatile configuration memory.", "X2-10GB-SR , VID: V06 , SN: ONT1702020H NAME: \"Transceiver Te2/3\",", "Dec 9 2019 System restarted at 10:27:57 EST Mon Dec", "\"2\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 
2.6\",", "\"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T1\", \"vid\": \"V06 \", } }, \"5\":", "'PWR-C2-2929WAC': { 'descr': 'LLL Power Supply', 'name': 'Switch 1 -", "'FOC16062824', 'vid': 'V02 ', }, }, }, 'vid': 'V05 ',", "obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output,", "}, }, }, }, }, }, '1': { 'other': {", "Mock(**self.empty_output) obj = ShowProcessesCpuSorted(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def", "'FOC65798TG8', 'subslot': { '1': { 'EM-HDA-6FXO': { 'descr': 'Six port", "port 10GE Rev. 2.0\", \"pid\": \"WS-X6816-10GE\", \"vid\": \"V02\", \"sn\": \"SAL17152QB3\",", "VID: V06 , SN: ONT170202UU NAME: \"Transceiver Te2/4\", DESCR: \"X2", "Mock(**self.golden_output) obj = ShowEnvironment(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class", "\"name\": \"2\", \"descr\": \"WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0\",", "Network (VPN) Module DRAM configuration is 72 bits wide with", ", VID: , SN: SAL11434N9G NAME: \"switching engine sub-module of", "DESCR: \"Wan Interface Card BRI U (2091, 3086)\" PID: WIC-1B-U-V2", "0 PIM Process 84 3582279 1466728 2442 0.55% 0.87% 2.77%", "\"WS-C3210X-48\" PID: WS-C3210X-48T-S , VID: V02 , SN: FD5678Z90P NAME:", "\"descr\": \"WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0\", \"pid\": \"WS-X6816-10GE\",", "by Cisco Systems, Inc. 
Compiled Mon 22-Jan-18 04:07 by prod_rel_team", "\"permissions\": \"drw-\" }, \"nvram\": { \"last_modified_date\": \"Oct 17 2018 18:57:10", "version_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) version_obj", "NAME: \"switching engine sub-module of 1\", DESCR: \"WS-F6K-PFC3BXL Policy Feature", "PIM Process 84 3582279 1466728 2442 0.55% 0.87% 2.77% 0", "'name': 'IOSv', 'pid': 'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S', 'vid': '1.0', }, },", "\"2700W AC power supply for CISCO7604 1\" PID: PWR-2700-AC/4 ,", "System image file is \"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\" Last reload type: Normal Reload", "Compiled Fri 05-Aug-11 00:32 by prod_rel_team ROM: System Bootstrap, Version", "\"1\", DESCR: \"VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/", "{ \"name\": \"WS-C6504-E\", \"descr\": \"Cisco Systems Cisco 6500 4-slot Chassis", "\"Transceiver Te1/4\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/4\" PID: X2-10GB-SR ,", "parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_asr1k) class test_show_platform_power(test_show_platform_power_iosxe): def test_empty(self): self.device", "\"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170201TT\", \"vid\": \"V06 \", } }, },", "\"SAL11434N9G\", \"vid\": \"\", }, \"WS-F6K-PFC3BXL\": { \"descr\": \"WS-F6K-PFC3BXL Policy Feature", "'''\\ Redundant System Information : ------------------------------ Available system uptime =", "\"VS-F6K-MSFC5\", \"sn\": \"SAL17142D06\", \"vid\": \"\", }, \"VS-F6K-PFC4\": { \"descr\": \"VS-F6K-PFC4", "\"2700W AC power supply for CISCO7604 2\", \"pid\": \"PWR-2700-AC/4\", \"vid\":", "], \"five_sec_cpu_interrupts\": 0 } golden_output_1 = {'execute.return_value': ''' CPU utilization", "test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) with self.assertRaises(SchemaEmptyParserError):", "parsed_output = obj.parse() 
self.assertEqual(parsed_output, self.golden_parsed_output_4) def test_golden_output_5(self): self.maxDiff = None", "configuration': '512'}, 'next_reload_license_level': 'ipservices', 'number_of_intfs': {'Gigabit Ethernet': '28', 'Ten Gigabit", "Tue Sep 10 2019 System image file is \"flash:c3750e-universalk9-mz.152-2.E8.bin\" Last", "\"s72033_rp-ADVENTERPRISEK9_WAN-M\", 'compiled_by': 'kellythw', 'compiled_date': 'Thu 23-Nov-06 06:26', \"image_type\": \"production image\",", "(slot: 0) information is not available because it is in", ", VID: , SN: FOC09876NP3 '''} golden_parsed_output_6 = { 'slot':", "SPC1519005V NAME: \"2\", DESCR: \"WS-C3210X-48\" PID: WS-C3210X-48T-S , VID: V02", "= ShowPlatformPower(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff", "(fc1)', 'chassis': 'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK', 'curr_config_register': '0xF', 'compiled_by': 'prod_rel_team', 'compiled_date':", "= Mock(**self.golden_output_active) obj = ShowPlatformHardware(device=self.device) parsed_output = obj.parse() self.maxDiff =", ", SN: ONT1702020H NAME: \"Transceiver Te2/3\", DESCR: \"X2 Transceiver 10Gbase-SR", "}, }, }, 'slot': { '0': { 'lc': { 'CISCO3845-MB':", "SOFTWARE (fc1) best-c3945-IOS3 uptime is 1 hour, 20 minutes System", "test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitch(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "976 0.15% 0.07% 0.07% 0 OSPF-1 Hello '''} def test_empty(self):", "ipbasek9 Permanent ipbasek9 security securityk9 Permanent securityk9 uc None None", "internal SIMM (Sector size 512K). 
Configuration register is 0x2102 '''}", "{ 'license_level': 'ipbasek9', 'license_type': 'Permanent', 'next_reload_license_level': 'ipbasek9', }, 'security': {", "2.0\", \"name\": \"msfc sub-module of 1\", \"pid\": \"VS-F6K-MSFC5\", \"sn\": \"SAL17142D06\",", "{ 'slot': { '1': { 'rp': { 'WS-C0123X-45T-S': { 'descr':", "def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) redundancy_obj =", "SFP-10G-LR , VID: V02 , SN: ONT182746GZ NAME: \"1\", DESCR:", "5Min TTY Process 368 362874 3321960 109 1.03% 0.54% 0.48%", "NAME: \"CLK-7600 1\", DESCR: \"OSR-7600 Clock FRU 1\" PID: CLK-7600", "'FOC91864MNN', 'vid': 'V01 ', }, }, }, 'vid': 'V01 ',", "best-c3945-IOS3#show inventory NAME: \"CISCO3945-CHASSIS\", DESCR: \"CISCO3945-CHASSIS\" PID: CISCO3945-CHASSIS , VID:", "platform_obj = ShowProcessesCpuHistory(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe):", "2019', 'uptime': '1 hour, 20 minutes', 'version': '15.0(1)M7', 'version_short': '15.0',", "(VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2)\", \"uptime_in_curr_state\": \"1 day, 16", "\"sn\": \"FNS153920YJ\", \"vid\": \"V06 \", } }, \"16\": { \"X2-10GB-SR\":", "QCS1604P0BT '''} golden_parsed_output_5 = { 'main': { 'chassis': { 'CISCO3945-CHASSIS':", "cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device) parsed_output = cpu_platform_obj.parse() self.maxDiff = None self.assertEqual(parsed_output,", "Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1\", DESCR:", "test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev = Device(name='c3850') empty_output = {'execute.return_value': ''} golden_parsed_output =", "If you are unable to comply with U.S. 
and local", "self.golden_parsed_output_active) def test_golden_standby(self): self.device = Mock(**self.golden_output_standby) obj = ShowVersionRp(device=self.device) parsed_output", "= None self.device = Mock(**self.golden_output_6) obj = ShowInventory(device=self.device) parsed_output =", "DESCR: \"ABC Power Supply\" PID: C3KX-PWR-350WAC , VID: V01D ,", "\"sn\": \"FXS181101V4\", } } }, \"CLK-7600 2\": { \"other\": {", "PID: EVM-HD-8FXS/DID , VID: V04 , SN: FOC65798TG8 NAME: \"Six", "SIMM (Sector size 512K). Configuration register is 0x2102 '''} golden_output_ios_1", "'next_reload_license_level': 'securityk9', }, 'uc': { 'license_level': 'None', 'license_type': 'None', 'next_reload_license_level':", "golden_parsed_output_iosv = { \"active\": { \"boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\", \"configuration_register\": \"0x2012\" },", "SN: SAL17142D06 NAME: \"VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module", "{ 'chassis': { 'CISCO3825': { 'descr': '3825 chassis', 'name': '3825", "PID: CLK-7600 , VID: , SN: FXS170802GL NAME: \"CLK-7600 2\",", "2442, \"tty\": 0, \"one_min_cpu\": 0.87, \"process\": \"IOSv e1000\", \"five_min_cpu\": 2.77,", "power-on System restarted at 12:22:21 PDT Mon Sep 10 2018", "def test_golden_active(self): self.maxDiff = None self.device = Mock(**self.golden_output_active) platform_obj =", "SN: FD232323XXZ NAME: \"GigabitEthernet1/0/49\", DESCR: \"1000BaseSX SFP\" PID: GLC-SX-MMD ,", "Version SW Image ------ ----- ----- ---------- ---------- * 1", "port 10/100/1000mb Ethernet Rev. 
2.6\" PID: WS-X6748-GE-TX , VID: V02,", "import ShowVersion,\\ Dir,\\ ShowRedundancy,\\ ShowInventory,\\ ShowBootvar, \\ ShowProcessesCpuSorted,\\ ShowProcessesCpu,\\ ShowVersionRp,\\", "'SM-ES2-16-P': { 'descr': 'SM-ES2-16-P', 'name': '1', 'pid': 'SM-ES2-16-P', 'sn': 'FOC09876NP3',", ", SN: LTP13579L3R NAME: \"TenGigabitEthernet2/1/1\", DESCR: \"SFP-10GBase-LR\" PID: SFP-10G-LR ,", ", SN: FOC85389QXB '''} golden_parsed_output_8 = { 'main': { 'chassis':", "with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def test_golden_active_ipm(self):", "A0 Model number : WS-C3750X-24S-E Daughterboard assembly number : 800-32727-03", "= Mock(**self.golden_output_5) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5)", "\"4\": { \"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"4\", \"descr\": \"WS-X6748-GE-TX", "day, 16 hours, 42 minutes\", \"config_register\": \"0x0\", \"curr_sw_state\": \"ACTIVE\" }", "\"V06 \", } }, }, } } }, \"2\": {", "CLK-7600 , VID: , SN: FXS181101V4 NAME: \"1\", DESCR: \"WS-SUP720-3BXL", "Ten Gigabit Ethernet interfaces The password-recovery mechanism is enabled. 
512K", "prod_rel_team ROM: Bootstrap program is C3750E boot loader BOOTLDR: C3750E", "2018 System image file is \"flash:c3750e-universalk9-mz\" This product contains cryptographic", "CISCO3945-CHASSIS (revision 1.1) with C3900-SPE150/K9 with 2027520K/69632K bytes of memory.", "Mock(**self.empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(AttributeError): parsered_output = version_obj.parse() def", "412 113457 116196 976 0.15% 0.07% 0.07% 0 OSPF-1 Hello", "}, }, } golden_output_5 = {'execute.return_value': ''' best-c3945-IOS3#show inventory NAME:", "\"five_min_cpu\": 0.07, \"runtime\": 113457, \"pid\": 412, \"five_sec_cpu\": 0.15 } },", "BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable = BOOTLDR variable =", "Mock(**self.empty_output) obj = ShowEnvironment(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def", "def test_golden_output_8(self): self.maxDiff = None self.device = Mock(**self.golden_output_8) obj =", "\"OSR-7600 Clock FRU 1\" PID: CLK-7600 , VID: , SN:", "X2-10GB-SR , VID: V06 , SN: ONT170202T5 NAME: \"Transceiver Te2/5\",", ", VID: V01 , SN: FOC91864MNN '''} golden_parsed_output_9 = {", "= Mock(**self.golden_output_active_opm) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff", "'pid': 'AIM-VPN/SSL-2', 'sn': 'FOC2837465E', 'vid': 'V01', 'subslot': { '0': {", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1) class test_show_processes_cpu(test_show_processes_cpu_iosxe): def test_golden(self): self.device", "self.device = Mock(**self.golden_output_5) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,", "Te2/1\" PID: X2-10GB-SR , VID: V06 , SN: ONT17020338 NAME:", "{ \"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"4\", \"descr\": \"WS-X6748-GE-TX CEF720", "1.5\" PID: 
VS-SUP2T-10G , VID: V05, SN: SAL17152N0F NAME: \"msfc", "'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot", "= Mock(**self.golden_output_3) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3)", "WAN Interface Card on Slot 0 SubSlot 3', 'pid': 'HWIC-2FE',", "{ \"invoked\": 3321960, \"usecs\": 109, \"tty\": 0, \"one_min_cpu\": 0.54, \"process\":", "'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6', 'subslot': { '3': { 'HWIC-2FE': {", "\"1\": { \"rp\": { \"WS-SUP720-3BXL\": { \"name\": \"1\", \"descr\": \"WS-SUP720-3BXL", "minutes System returned to ROM by reload at 10:26:47 EST", "Systems Catalyst 6500 3-slot Chassis System\", \"pid\": \"WS-C6503-E\", \"vid\": \"V03\",", "at 10:26:47 EST Mon Dec 9 2019 System restarted at", "WS-F6K-PFC3BXL , VID: V01, SN: SAL11434LYG NAME: \"2\", DESCR: \"WS-X6748-GE-TX", "\"WS-X6824-SFP\": { \"name\": \"3\", \"descr\": \"WS-X6824-SFP CEF720 24 port 1000mb", "'c3845 Motherboard with Gigabit Ethernet on Slot 0', 'pid': 'CISCO3845-MB',", "= None self.dev = Mock(**self.golden_output) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output =", "as test_show_platform_software_status_control_processor_brief_iosxe,\\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\\ TestShowPlatformHardware as test_show_platform_hardware_iosxe,\\ TestShowPlatformHardwarePlim as", "on Slot 0 SubSlot 0\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48", "free) '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) dir_obj = Dir(device=self.dev1)", "def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowPlatformHardware(device=self.device) parsed_output =", "= platform_obj.parse() def test_semi_empty(self): self.dev2 = Mock(**self.semi_empty_output) platform_obj = ShowPlatform(device=self.dev2)", "\"vid\": \"V06 \", } }, }, } } }, \"2\":", "FXO voice interface 
daughtercard on Slot 1 SubSlot 1', 'pid':", "TestShowPlatformPower as test_show_platform_power_iosxe,\\ TestShowVersionRp as test_show_version_rp_iosxe,\\ TestShowProcessesCpu as test_show_processes_cpu_iosxe,\\ TestShowProcessesCpuHistory", "obj = ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe):", "\"X2 Transceiver 10Gbase-SR Te1/5\", \"name\": \"Transceiver Te1/5\", \"pid\": \"X2-10GB-SR\", \"sn\":", "def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) with", "obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(port='0/0/0') self.maxDiff = None self.assertEqual(parsed_output,", "'15.2(3r)E, RELEASE SOFTWARE (fc1)', 'chassis': 'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK', 'curr_config_register': '0xF',", "FDO1633Q14S Last reset from power-on 14 Virtual Ethernet interfaces 1", "one minute: 4%; five minutes: 9% PID Runtime(ms) Invoked uSecs", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_module(test_show_module_iosxe): def test_empty(self): self.dev1", "C3750E boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 15.2(3r)E,", "{ 'rp': { 'CISCO3825': { 'subslot': { '0': { 'VWIC2-2MFT-T1/E1':", "None self.assertEqual(parsed_output, self.golden_parsed_output_subslot) def test_golden_slot_internal(self): self.device = Mock(**self.golden_output_slot_internal) obj =", "\"Gigabit(1000BaseT) module for EtherSwitch NM\" PID: GE-DCARD-ESW , VID: V01", "\"50\", 'Virtual Ethernet/IEEE 802.3': '1' }, \"mem_size\": {\"non-volatile configuration\": \"1917\",", "V06 , SN: ONT1702020H NAME: \"Transceiver Te2/3\", DESCR: \"X2 Transceiver", "is 9 weeks, 4 days, 2 hours, 3 minutes System", "VID: , SN: 
FXS170802GL NAME: \"1\", DESCR: \"VS-SUP2T-10G 5 ports", "self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output", "of memory. Processor board ID 9K66Z7TOKAACDEQA24N7S 6 Gigabit Ethernet interfaces", "class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics(", ", SN: FXS170802GL NAME: \"1\", DESCR: \"VS-SUP2T-10G 5 ports Supervisor", "memory. 65536K bytes of Flash internal SIMM (Sector size 512K).", "Code Number : CMMPP00DRB Hardware Board Revision Number : 0x05", "None self.dev_iosv = Mock(**self.golden_output_ios_cat6k) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse()", "test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) dir_obj = Dir(device=self.dev_iosv)", "DESCR: \"WS-C8888X-88\" PID: WS-C0123X-45T-S , VID: V00 , SN: FDO123R12W", "1\", DESCR: \"BCA Power Supply\" PID: C3KX-PWR-007CBA , VID: V01L", "Power Supply 1', 'pid': 'PWR-C2-2929WAC', 'sn': 'LIT03728KKK', 'vid': 'V02L ',", "on Slot 1 SubSlot 1', 'pid': 'EM-HDA-6FXO', 'sn': 'FOC85389QXB', 'vid':", "'1', 'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2' }, 'mem_size':", "\"DCH170900PF\", } } }, \"PS 1 PWR-2700-AC/4\": { \"other\": {", "SN: DCH183500KW NAME: \"PS 1 PWR-1400-AC\", DESCR: \"AC power supply,", "\"APS17070093\", } } }, \"1\": { \"rp\": { \"VS-SUP2T-10G\": {", "System\" PID: WS-C6504-E , VID: V01, SN: FXS1712Q1R8 NAME: \"CLK-7600", "V06 , SN: ONT170202T1 NAME: \"Transceiver Te1/5\", DESCR: \"X2 Transceiver", "variable = Configuration register is 0x2012 Standby not ready to", "'''} golden_parsed_output_ios_1 = { 'version': {'version_short': '15.2', 'platform': 'C3750E', 'version':", "power supply, 1400 watt 1\" PID: PWR-1400-AC , VID: V01,", 
"'SFP-10G-SR': { 'descr': 'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR', 'sn': 'SPC1519005V',", "2142715904 bytes total (1989595136 bytes free) '''} def test_empty(self): self.dev1", "Configuration register is 0xF '''} golden_parsed_output_ios_1 = { 'version': {'version_short':", "\"FNS153920YJ\", \"vid\": \"V06 \", } }, \"16\": { \"X2-10GB-SR\": {", "\"ONT17020338\", \"vid\": \"V06 \", } }, \"2\": { \"X2-10GB-SR\": {", "= Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "DESCR: \"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 1.1\" PID: WS-F6700-DFC3CXL", "'High Density Voice Module - 8FXS/DID', 'name': 'High Density Voice", "{ \"lc\": { \"WS-X6824-SFP\": { \"name\": \"3\", \"descr\": \"WS-X6824-SFP CEF720", "SN: FD5678Z90P NAME: \"Switch 2 - Power Supply 1\", DESCR:", "product contains cryptographic features and is subject to United States", "\"WS-C6504-E\": { \"name\": \"WS-C6504-E\", \"descr\": \"Cisco Systems Cisco 6500 4-slot", "{'execute.return_value': '''\\ Directory of flash0:/ 1 drw- 0 Jan 30", "AC Power Supply 1': { 'other': { 'C3900 AC Power", "1 PWR-1400-AC\": { \"name\": \"PS 1 PWR-1400-AC\", \"descr\": \"AC power", "def test_empty(self): self.dev1 = Mock(**self.empty_output) inventory_obj = ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError):", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8) def test_golden_output_9(self): self.maxDiff = None self.device", "1\", DESCR: \"WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 
1.8\" PID:", "ShowEnvironment,\\ ShowModule,\\ ShowSwitch, ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\\", "'pid': 'SM-ES2-16-P', 'sn': 'FOC09876NP3', 'vid': '', }, }, }, },", "DSPs on Slot 0 SubSlot 4', 'pid': 'PVDM2-64', 'sn': 'FOC63358WSI',", "with self.assertRaises(KeyError): parsed_output = version_obj.parse() def test_golden_iosv(self): self.maxDiff = None", "Card 4 Rev. 2.0\", \"name\": \"VS-F6K-PFC4 Policy Feature Card 4", "Feature Card 4 Rev. 2.0\" PID: VS-F6K-PFC4 , VID: V03,", "DRAM configuration is 72 bits wide with parity disabled. 256K", "\"Cisco Systems Catalyst 6500 3-slot Chassis System\" PID: WS-C6503-E ,", "'V01 ', }, }, }, 'vid': 'V01 ', }, },", "= None self.device = Mock(**self.golden_output_5) obj = ShowInventory(device=self.device) parsed_output =", "dev = Device(name='ios') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''}", "DESCR: \"SFP-10GBase-LR\" PID: SFP-10G-LR , VID: V02 , SN: ONT182746GZ", "System serial number : FDO1633Q14S Top Assembly Part Number :", "= Mock(**self.golden_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output)", "Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2018 by Cisco Systems, Inc.", "{ \"descr\": \"X2 Transceiver 10Gbase-SR Te2/2\", \"name\": \"Transceiver Te2/2\", \"pid\":", "from power-on 2 Virtual Ethernet interfaces 1 FastEthernet interface 28", "{'execute.return_value': ''' NAME: \"WS-C6504-E\", DESCR: \"Cisco Systems Cisco 6500 4-slot", "}, } } }, \"2\": { \"lc\": { \"WS-X6748-GE-TX\": {", "V03 , SN: FOC85389QXB '''} golden_parsed_output_8 = { 'main': {", "\"0\", \"permissions\": \"drw-\" }, \"vios-adventerprisek9-m\": { \"last_modified_date\": \"Mar 29 2017", "memory. 
2000880K bytes of ATA System CompactFlash 0 (Read/Write) License", "test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) with", "Slot 0 SubSlot 4', 'pid': 'PVDM2-64', 'sn': 'FOC63358WSI', 'vid': 'V01", "self.maxDiff = None self.device = Mock(**self.golden_output_3) obj = ShowInventory(device=self.device) parsed_output", "is 0x2102 '''} parsed_output = { 'version': { 'chassis': 'CISCO3945-CHASSIS',", "\"Transceiver Te2/1\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT17020338\", \"vid\": \"V06 \", }", "Ethernet Rev. 3.4\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V04\", \"sn\": \"SAL14017TWF\", \"subslot\":", ", SN: ONT170202T5 NAME: \"Transceiver Te2/5\", DESCR: \"X2 Transceiver 10Gbase-SR", "1 Virtual Private Network (VPN) Module DRAM configuration is 72", "obj = ShowPlatformHardwareSerdes(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe):", "obj = ShowVersionRp(device=self.device) self.maxDiff = None with self.assertRaises(SchemaEmptyParserError): parsed_output =", "'''} def test_empty(self): self.dev = Mock(**self.empty_output) platform_obj = ShowBootvar(device=self.dev) with", "\"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te1/4\", \"name\": \"Transceiver Te1/4\",", "VID: V01, SN: FOC2837465E '''} golden_parsed_output_7 = { 'main': {", "self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform(test_show_platform_iosxe): def test_empty(self): self.dev1 =", "}, \"FAN-MOD-4HS 1\": { \"other\": { \"FAN-MOD-4HS 1\": { \"name\":", "sending email to <EMAIL>. 
Cisco CISCO3945-CHASSIS (revision 1.1) with C3900-SPE150/K9", "platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def test_empty(self): self.device = Mock(**self.empty_output)", "{ \"last_modified_date\": \"Oct 14 2013 00:00:00 +00:00\", \"index\": \"264\", \"size\":", "'version': {'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version ' '15.2(3r)E, RELEASE", "NAME: \"PS 2 PWR-2700-AC/4\", DESCR: \"2700W AC power supply for", "{ \"VS-F6K-MSFC5\": { \"descr\": \"VS-F6K-MSFC5 CPU Daughterboard Rev. 2.0\", \"name\":", "= platform_obj.parse() def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output)", "V04, SN: SAL171848KL NAME: \"4\", DESCR: \"WS-X6748-GE-TX CEF720 48 port", "}, 'slot': { '0': { 'rp': { 'C3900-SPE150/K9': { 'descr':", "(c) 1986-2013 by Cisco Systems, Inc. Compiled Wed 26-Jun-13 09:56", "packet buffer memory. 65536K bytes of Flash internal SIMM (Sector", "0', 'pid': 'AIM-VPN/SSL-3', 'sn': 'FOC758693YO', 'vid': 'V01', }, }, },", ", VID: V06 , SN: ONT170202UU NAME: \"Transceiver Te2/4\", DESCR:", "'ipservices', 'chassis': 'WS-C3750X-24S', 'main_mem': '524288', 'processor_type': 'PowerPC405', 'rtr_type': 'WS-C3750X-24S', 'chassis_sn':", "3 days, 10 hours, 27 minutes', 'returned_to_rom_by': 'power-on', 'system_restarted_at': '05:06:40", "'license_type': 'None', 'next_reload_license_level': 'None', }, }, 'main_mem': '2027520', 'mem_size': {", "Mon Sep 10 2018 System image file is \"flash:c3750e-universalk9-mz\" This", "'pid': 'NM-16ESW', 'sn': 'FOC135464KO', 'subslot': { '0': { 'GE-DCARD-ESW': {", "Clock FRU 1\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", }", "imply third-party authority to import, export, distribute or use encryption.", "'1': { 'C1010X-STACK': { 'descr': 'Stacking Module', 'name': 'Switch 1", "= Dir(device=self.dev_iosv) parsed_output = dir_obj.parse() self.assertEqual(parsed_output, 
self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase): dev1", "1\", \"pid\": \"FAN-MOD-4HS\", \"vid\": \"V01\", \"sn\": \"DCH170900PF\", } } },", "'vid': 'V01 ', }, }, }, 'vid': 'V05 ', },", "{ \"name\": \"CLK-7600 1\", \"descr\": \"OSR-7600 Clock FRU 1\", \"pid\":", "3321960 109 1.03% 0.54% 0.48% 0 PIM Process 84 3582279", "FDO202907UH Model revision number : W0 Motherboard revision number :", "= ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8) def test_golden_output_9(self): self.maxDiff", "CTS Rev. 1.5\", \"pid\": \"VS-SUP2T-10G\", \"vid\": \"V05\", \"sn\": \"SAL17152N0F\", \"subslot\":", "V02, SN: SAL1128UPQ9 NAME: \"switching engine sub-module of 2\", DESCR:", "\"os\": \"IOS\", \"version_short\": \"15.6\", \"number_of_intfs\": { \"Gigabit Ethernet\": \"6\" },", "} } golden_output_iosv = {'execute.return_value': '''\\ Cisco IOS Software, IOSv", "is 0x2012 Standby not ready to show bootvar '''} def", "\"PS 1 PWR-2700-AC/4\": { \"name\": \"PS 1 PWR-2700-AC/4\", \"descr\": \"2700W", "'image_type': 'production image', 'last_reload_reason': 'Reload Command', 'last_reload_type': 'Normal Reload', 'license_udi':", "SN: FOC65798TG8 NAME: \"Six port FXO voice interface daughtercard on", "SN: SAL11434N9G NAME: \"switching engine sub-module of 1\", DESCR: \"WS-F6K-PFC3BXL", "golden_parsed_output_7 = { 'main': { 'chassis': { 'CISCO2821': { 'descr':", "{ \"descr\": \"VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0\", \"name\":", "Te2/2\" PID: X2-10GB-SR , VID: V06 , SN: ONT1702020H NAME:", "\"maint_mode\": \"Disabled\", \"switchovers_system_experienced\": \"0\", \"available_system_uptime\": \"0 minutes\", \"communications\": \"Down\", \"hw_mode\":", "contact us by sending email to <EMAIL>. 
License Level: ipservices", "\"FXS1821Q2H9\", \"last_reload_reason\": \"s/w reset\", 'processor_board_flash': '65536K', \"number_of_intfs\": { \"Gigabit Ethernet/IEEE", "1 - Power Supply 1\", DESCR: \"LLL Power Supply\" PID:", "----------------------------------------------------------------- Technology Technology-package Technology-package Current Type Next reboot ------------------------------------------------------------------ ipbase", "\"os\": \"IOS\", \"version_short\": \"12.2\", \"platform\": \"s72033_rp\", \"version\": \"12.2(18)SXF7\", \"image_id\": \"s72033_rp-ADVENTERPRISEK9_WAN-M\",", "\"\", \"sn\": \"FXS181101V4\", } } }, \"CLK-7600 2\": { \"other\":", "2 - Power Supply 1\", DESCR: \"BCA Power Supply\" PID:", "\"\", }, \"VS-F6K-PFC4\": { \"descr\": \"VS-F6K-PFC4 Policy Feature Card 4", "{'execute.return_value': ''' NAME: \"1\", DESCR: \"WS-C8888X-88\" PID: WS-C0123X-45T-S , VID:", "configuration\": \"1917\", \"packet buffer\": \"8192\"}, \"curr_config_register\": \"0x2102\", } } golden_output_ios_cat6k", "{'execute.return_value': '''\\ show processes cpu sorted 5min | inc CPU", "VID: , SN: FXS181101V4 NAME: \"CLK-7600 2\", DESCR: \"OSR-7600 Clock", "} } golden_output_ios_cat6k = {'execute.return_value': ''' show version Cisco Internetwork", "1\": { \"name\": \"CLK-7600 1\", \"descr\": \"OSR-7600 Clock FRU 1\",", "SN: ONT170202UU NAME: \"Transceiver Te2/4\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/4\"", "2 SubSlot 0', 'pid': 'GE-DCARD-ESW', 'sn': 'FOC91864MNN', 'vid': 'V01 ',", "V02 , SN: ONT182746GZ NAME: \"1\", DESCR: \"WS-C1010XR-48FPS-I\" PID: WS-C1010XR-48FPS-I,", "Rev. 1.0\" PID: WS-F6K-DFC4-A , VID: V04, SN: SAL171848KL NAME:", "self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios) version_obj = ShowVersion(device=self.dev_iosv) parsed_output", "}, 3: { \"invoked\": 116196, \"usecs\": 976, \"tty\": 0, \"one_min_cpu\":", "Card 4 Rev. 
2.0\" PID: VS-F6K-PFC4 , VID: V03, SN:", "Cisco 3900 ISR on Slot 0\", DESCR: \"Cisco Services Performance", "73-15476-04 Motherboard serial number : FDO202907UH Model revision number :", "} }, \"1\": { \"rp\": { \"WS-SUP720-3BXL\": { \"name\": \"1\",", "show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device)", "= obj.parse(status='active', slot='0') class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def test_golden_active_ipm(self): self.device = Mock(**self.golden_output_active_ipm)", "2 ports Supervisor Engine 720 Rev. 5.6\" PID: WS-SUP720-3BXL ,", "'FastEthernet': '1' }, 'os': 'IOS', 'platform': 'C3750E', 'processor_type': 'PowerPC405', 'returned_to_rom_by':", "Mon 22-Jan-18 04:07 by prod_rel_team ROM: Bootstrap program is C3750E", "returned to ROM by power-on System restarted at 05:06:40 GMT", "} golden_output_3 = {'execute.return_value': ''' # show inventory NAME: \"WS-C6503-E\",", "self.assertEqual(parsed_output, self.golden_parsed_output_8) def test_golden_output_9(self): self.maxDiff = None self.device = Mock(**self.golden_output_9)", "\"configuration_register\": \"0x2012\" }, \"next_reload_boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\" } golden_output_iosv = {'execute.return_value': '''\\", "3-slot Chassis System\" PID: WS-C6503-E , VID: V03, SN: FXS1821Q2H9", "'name': '3845 chassis', 'pid': 'CISCO3845', 'sn': 'FTX6666ARJ9', 'vid': 'V05 ',", "ISR on Slot 0\", DESCR: \"Cisco Services Performance Engine 150", "\"Gigabit Ethernet/IEEE 802.3\": \"50\", 'Virtual Ethernet/IEEE 802.3': '1' }, \"mem_size\":", "}, }, 'other': { 'AIM-VPN/SSL-3': { 'descr': 'Encryption AIM Element',", "serial number : FDO202907UH Model revision number : W0 Motherboard", 
"ShowProcessesCpuSorted,\\ ShowProcessesCpu,\\ ShowVersionRp,\\ ShowPlatform,\\ ShowPlatformPower,\\ ShowProcessesCpuHistory,\\ ShowProcessesCpuPlatform,\\ ShowPlatformSoftwareStatusControl,\\ ShowPlatformSoftwareSlotActiveMonitorMem,\\ ShowPlatformHardware,\\", "Compiled Thu 23-Nov-06 06:26 by kellythw Image text-base: 0x40101040, data-base:", "ShowPlatformHardware,\\ ShowPlatformHardwarePlim,\\ ShowPlatformHardwareQfpBqsOpmMapping,\\ ShowPlatformHardwareQfpBqsIpmMapping,\\ ShowPlatformHardwareSerdes,\\ ShowPlatformHardwareSerdesInternal,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\ ShowPlatformHardwareQfpStatisticsDrop,\\ ShowEnvironment,\\", "golden_output_ios = {'execute.return_value': '''\\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M),", "} } }, \"PS 2 PWR-2700-AC/4\": { \"other\": { \"PS", "1.0\", \"platform\": \"IOSv\", \"image_type\": \"production image\", 'processor_board_flash': '10080K', 'returned_to_rom_by': 'reload',", "= {'execute.return_value': ''' CPU utilization for five seconds: 4%/0%; one", "status='active', interface='gigabitEthernet 0/0/0') def test_golden(self): self.maxDiff = None self.device =", "is \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\" This product contains cryptographic features and is subject", "PDT Mon Sep 10 2018 System image file is \"flash:c3750e-universalk9-mz\"", "ATA CompactFlash 3 (Read/Write) Configuration register is 0x0'''} golden_parsed_output_ios =", "10Gbase-SR Te2/3\" PID: X2-10GB-SR , VID: V06 , SN: ONT170202UU", "Support: http://www.cisco.com/techsupport Copyright (c) 1986-2018 by Cisco Systems, Inc. 
Compiled", "interface='gigabitEthernet 0/0/0') self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def test_empty(self): self.device =", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_empty(self): self.device1 = Mock(**self.empty_output)", "5 hours, 16 minutes\", \"system_image\": \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\", \"chassis\": \"WS-C6503-E\", \"main_mem\": \"983008\",", "'main': { 'chassis': { 'CISCO3945-CHASSIS': { 'descr': 'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS',", "PID: C3KX-PWR-350WAC , VID: V01D , SN: DTN1504L0E9 NAME: \"TenGigabitEthernet1/1/1\",", "Performance Engine 150 for Cisco 3900 ISR on Slot 0\",", "\"269\", \"size\": \"119\", \"permissions\": \"-rw-\" }, \"config\": { \"last_modified_date\": \"Oct", "\"3845 chassis\" PID: CISCO3845 , VID: V05 , SN: FTX6666ARJ9", "third-party authority to import, export, distribute or use encryption. 
Importers,", "'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21 PDT Mon Sep 10 2018', 'uptime':", "'Cisco Services Performance Engine 150 for Cisco 3900 ISR', 'name':", "'512'}, 'next_reload_license_level': 'ipservices', 'number_of_intfs': {'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet':", "2-Port RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port", "0K bytes of ATA CompactFlash 1 (Read/Write) 0K bytes of", "}, } golden_output_9 = {'execute.return_value': ''' NAME: \"3845 chassis\", DESCR:", "{ \"CLK-7600 1\": { \"other\": { \"CLK-7600 1\": { \"name\":", "4 days, 2 hours, 3 minutes', 'version': '12.2(55)SE8', 'version_short': '12.2'", "None self.assertEqual(parsed_output, self.golden_parsed_output_port) def test_golden_slot(self): self.device = Mock(**self.golden_output_slot) obj =", "self.device = Mock(**self.golden_output_subslot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(subslot='0/1') self.maxDiff", "= obj.parse(status='active', slot='0') class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): def test_golden_serdes(self): self.device = Mock(**self.golden_output_serdes)", "version_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) version_obj = ShowVersion(device=self.dev1) with", "'''} golden_parsed_output_ios_cat6k = { \"version\": { \"os\": \"IOS\", \"version_short\": \"12.2\",", "IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE SOFTWARE (fc1)", ", VID: , SN: FXS170802GL NAME: \"1\", DESCR: \"VS-SUP2T-10G 5", "= None self.device = Mock(**self.golden_output_active_opm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output", "obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_4) def test_golden_output_5(self):", "Cisco cryptographic products does not 
imply third-party authority to import,", "Transceiver 10Gbase-SR Te2/16\" PID: X2-10GB-SR , VID: V06 , SN:", "parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios) def test_golden_ios_cat6k(self): self.maxDiff = None", "'Stacking Module', 'name': 'Switch 1 - FlexStackPlus Module', 'pid': 'C1010X-STACK',", ", SN: FOC98675U0D NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk", "}, }, } golden_output_2 = {'execute.return_value': ''' NAME: \"WS-C6504-E\", DESCR:", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase): dev = Device(name='ios') dev_iosv", "'DISABLED' state '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) redundancy_obj =", "None self.assertEqual(parsed_output, self.golden_parsed_output_serdes) def test_empty(self): self.device1 = Mock(**self.empty_output) obj =", "of ATA CompactFlash 1 (Read/Write) 0K bytes of ATA CompactFlash", "= {'execute.return_value': '''\\ Redundant System Information : ------------------------------ Available system", "Copyright (c) 1986-2018 by Cisco Systems, Inc. Compiled Mon 22-Jan-18", "golden_parsed_output_6 = { 'slot': { '1': { 'lc': { 'SM-ES2-16-P':", "test_golden(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitch(device=self.dev_c3850)", "\"curr_config_register\": \"0x0\", \"rom\": \"Bootstrap program is IOSv\", \"uptime\": \"1 day,", "\"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", } } }, \"FAN-MOD-4HS", "of U.S. 
laws governing Cisco cryptographic products may be found", "def test_golden_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1) obj =", "0 IOSv e1000 412 113457 116196 976 0.15% 0.07% 0.07%", "Slot 0', 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6', 'subslot': { '3': {", "exporters, distributors and users are responsible for compliance with U.S.", "obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff =", "Supply 1', 'pid': 'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R', 'vid': 'V01L ', },", "NAME: \"msfc sub-module of 1\", DESCR: \"VS-F6K-MSFC5 CPU Daughterboard Rev.", "ISR', 'name': 'Cisco Services Performance Engine 150 for Cisco 3900", "(C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE SOFTWARE (fc1) R5 uptime is 9", "it is in 'DISABLED' state '''} def test_empty(self): self.dev1 =", "X.25 software, Version 3.0.0. Bridging software. TN3270 Emulation software. 1", "{ \"WS-X6824-SFP\": { \"name\": \"3\", \"descr\": \"WS-X6824-SFP CEF720 24 port", "CISCO7604 1\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS1707008Y\", } }", "License UDI: ------------------------------------------------- Device# PID SN ------------------------------------------------- *0 C3900-SPE150/K9 FOC16050QP6", "Supply 1': { 'other': { 'C3900 AC Power Supply 1':", "Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output =", "\"12.2(18)SXF7\", \"image_id\": \"s72033_rp-ADVENTERPRISEK9_WAN-M\", 'compiled_by': 'kellythw', 'compiled_date': 'Thu 23-Nov-06 06:26', \"image_type\":", "'vid': 'V01 ', }, }, }, '16': { 'lc': {", "self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output", "days, 5 hours, 15 minutes System returned to ROM by", "2\" PID: PWR-2700-AC/4 , VID: V03, SN: APS17070093 '''} golden_parsed_output_3", "VID: V05 , SN: FTX7908A3RQ NAME: 
\"VWIC2-2MFT-T1/E1 - 2-Port RJ-48", "self.maxDiff = None self.device = Mock(**self.golden_output_5) obj = ShowInventory(device=self.device) parsed_output", "3: { \"invoked\": 116196, \"usecs\": 976, \"tty\": 0, \"one_min_cpu\": 0.07,", "3900 ISR on Slot 0\", DESCR: \"Cisco Services Performance Engine", "Transceiver 10Gbase-SR Te2/1\", \"name\": \"Transceiver Te2/1\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT17020338\",", "on Slot 2 SubSlot 0\", DESCR: \"Gigabit(1000BaseT) module for EtherSwitch", "\"-rw-\" }, \"boot\": { \"last_modified_date\": \"Jan 30 2013 00:00:00 +00:00\",", "1\", DESCR: \"OSR-7600 Clock FRU 1\" PID: CLK-7600 , VID:", "parsed_output = platform_obj.parse() def test_semi_empty(self): self.dev2 = Mock(**self.semi_empty_output) platform_obj =", "}, }, }, }, }, } golden_output_8 = {'execute.return_value': '''", "'slot': { '0': { 'lc': { 'CISCO3845-MB': { 'descr': 'c3845", "self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj = ShowProcessesCpuHistory(device=self.device) parsed_output", "FDO1633Q14S Top Assembly Part Number : 800-33746-04 Top Assembly Revision", "Transceiver 10Gbase-SR Te1/4\", \"name\": \"Transceiver Te1/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T1\",", "NAME: \"Two-Port Fast Ethernet High Speed WAN Interface Card on", "\"WS-F6K-DFC4-A\": { \"descr\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0\",", "'FGL161010K8', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Fri 05-Aug-11 00:32', 'curr_config_register': '0x2102', 'hostname':", "'pid': 'NM-1T3/E3=', 'sn': 'FOC28476ADM', 'vid': 'V01 ', }, }, },", "42 minutes\" } } golden_output_iosv = {'execute.return_value': '''\\ Cisco IOS", "System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1) best-c3945-IOS3 uptime is", "Card BRI U (2091, 3086)', 'name': 'Wan Interface Card BRI", "\"X2 Transceiver 10Gbase-SR Te1/4\", \"name\": \"Transceiver Te1/4\", \"pid\": \"X2-10GB-SR\", \"sn\":", "\"WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 
1.0\", \"pid\": \"WS-X6824-SFP\",", "'R5', 'image_id': 'C3750E-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'power-on', 'license_level': 'ipservices',", "Mock(**self.semi_empty_output) platform_obj = ShowPlatform(device=self.dev2) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def", "= None with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(rp='standby', status='running') def test_empty(self):", "0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D', 'vid': 'V01 ', }, },", "{'execute.return_value': '''\\ BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable = BOOTLDR", "PID: X2-10GB-SR , VID: V05 , SN: AGA1515XZE2 NAME: \"Transceiver", "{ \"chassis\": { \"WS-C6503-E\": { \"name\": \"WS-C6503-E\", \"descr\": \"Cisco Systems", "MAC Address : 84:3D:C6:FF:F1:B8 Motherboard assembly number : 73-15476-04 Motherboard", "}, }, '1': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port", "image', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'main_mem': '262144', 'mem_size':", "hour, 20 minutes', 'version': '15.0(1)M7', 'version_short': '15.0', }, } def", "'returned_to_rom_by': 'power cycle', \"rtr_type\": \"WS-C6503-E\", \"chassis_sn\": \"FXS1821Q2H9\", \"last_reload_reason\": \"s/w reset\",", "\", } }, \"3\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver", "'name': 'Cisco Services Performance Engine 150 for Cisco 3900 ISR", "self.device = Mock(**self.golden_output_active_opm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse(", "'CISCO2821': { 'descr': '2821 chassis', 'name': '2821 chassis', 'pid': 'CISCO2821',", "slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_empty(self): self.device1 =", "VS-SUP2T-10G , VID: V05, SN: SAL17152N0F NAME: \"msfc sub-module of", 
"utilization for five seconds: 13%/0%; one minute: 23%; five minutes:", "'rtr_type': 'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57 EST Mon Dec 9", "\"sn\": \"FXS181101V4\", } } }, \"1\": { \"rp\": { \"WS-SUP720-3BXL\":", "\"flash0:/vios-adventerprisek9-m\" Last reload reason: Unknown reason This product contains cryptographic", "'''} golden_parsed_output_6 = { 'slot': { '1': { 'lc': {", "V01 , SN: FOC135464KO NAME: \"Gigabit(1000BaseT) module for EtherSwitch NM", "= None self.assertEqual( parsed_output, self.golden_parsed_output_serdes_internal) def test_empty(self): self.device1 = Mock(**self.empty_output)", "SN: FOC729346GQ NAME: \"Virtual Private Network (VPN) Module on Slot", "PID: PWR-1400-AC , VID: V01, SN: ABC0830J127 '''} golden_output_4 =", "'lc': { 'CISCO3845-MB': { 'descr': 'c3845 Motherboard with Gigabit Ethernet',", "(Sector size 512K). Configuration register is 0x2102 '''} golden_output_ios_1 =", "DESCR: \"AC power supply, 1400 watt 1\" PID: PWR-1400-AC ,", "{ 'WS-C0123X-45T-S': { 'descr': 'WS-C8888X-88', 'name': '1', 'pid': 'WS-C0123X-45T-S', 'sn':", "-rw- 524288 Oct 17 2018 18:57:10 +00:00 nvram 269 -rw-", "= Mock(**self.golden_output_8) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8)", "= {'execute.return_value': '''\\ Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version", "WS-C3750X-24S (PowerPC405) processor (revision A0) with 524288K bytes of memory.", "2 Virtual Ethernet interfaces 1 FastEthernet interface 28 Gigabit Ethernet", "------ ----- ----- ---------- ---------- * 1 30 WS-C3750X-24P 12.2(55)SE8", "}, }, }, 'C3900 AC Power Supply 1': { 'other':", "as test_show_switch_iosxe,\\ TestShowSwitchDetail as test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase): dev1 = Device(name='empty')", "{ 'rp': { 'WS-C3210X-48T-S': { 'descr': 'WS-C3210X-48', 'name': '2', 
'pid':", "def test_empty(self): self.dev1 = Mock(**self.empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaEmptyParserError):", "None self.assertEqual(parsed_output, self.golden_parsed_output_slot) def test_golden_subslot(self): self.device = Mock(**self.golden_output_subslot) obj =", "'os': 'IOS', 'platform': 'C3900', 'processor_board_flash': '2000880K', 'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47", "Slot 0\", DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-3 , VID:", "on Slot 1', 'pid': 'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8', 'subslot': { '1':", "bytes of memory. Processor board ID 9K66Z7TOKAACDEQA24N7S 6 Gigabit Ethernet", "ISR\" PID: C3900-SPE150/K9 , VID: V05 , SN: FOC16050QP6 NAME:", "Forwarding Card 4 Rev. 1.0\", \"name\": \"WS-F6K-DFC4-A Distributed Forwarding Card", "{ 'descr': 'WS-C1010XR-48FPS-I', 'name': '1', 'pid': 'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3', 'subslot':", "is 10 weeks, 5 days, 5 hours, 16 minutes Time", "Systems, Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team ROM: Bootstrap", "DESCR: \"VS-F6K-MSFC5 CPU Daughterboard Rev. 
2.0\" PID: VS-F6K-MSFC5 , VID:", "with Gigabit Ethernet', 'name': 'c3845 Motherboard with Gigabit Ethernet on", "test_show_processes_cpu(test_show_processes_cpu_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) obj = ShowProcessesCpu(device=self.device) parsed_output", "'1', 'pid': 'WS-C0123X-45T-S', 'sn': 'FDO123R12W', 'subslot': { '1': { 'C3KX-PWR-350WAC':", "def test_empty(self): self.dev = Mock(**self.empty_output) platform_obj = ShowBootvar(device=self.dev) with self.assertRaises(SchemaEmptyParserError):", "PID: WS-C6504-E , VID: V01, SN: FXS1712Q1R8 NAME: \"CLK-7600 1\",", "\"pid\": \"WS-C6504-E\", \"vid\": \"V01\", \"sn\": \"FXS1712Q1R8\", } } }, \"slot\":", "class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def test_golden_active_ipm(self): self.device = Mock(**self.golden_output_active_ipm) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device)", "\", } }, \"6\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver", "1\", \"descr\": \"High Speed Fan Module for CISCO7604 1\", \"pid\":", "269 -rw- 119 Oct 17 2018 18:57:18 +00:00 e1000_bia.txt 2142715904", "'WS-C8888X-88', 'name': '1', 'pid': 'WS-C0123X-45T-S', 'sn': 'FDO123R12W', 'subslot': { '1':", "contact us by sending email to <EMAIL>. 
cisco WS-C6503-E (R7000)", "= None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowPlatform(device=self.dev_c3850) parsed_output =", "\"sn\": \"ABC0830J127\", } } }, }, } golden_output_3 = {'execute.return_value':", "{ 'C3KX-PWR-007CBA': { 'descr': 'BCA Power Supply', 'name': 'Switch 2", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7) def test_golden_output_8(self): self.maxDiff = None", "empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { \"active\": { \"boot_variable\":", "(fc1) R5 uptime is 9 weeks, 4 days, 2 hours,", "hours, 16 minutes Time since cat6k_tb1 switched to active is", "'license_package': { 'data': { 'license_level': 'datak9', 'license_type': 'Permanent', 'next_reload_license_level': 'datak9',", "} } }, 'license_package': { 'data': { 'license_level': 'datak9', 'license_type':", "= ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.parsed_output) class test_dir(unittest.TestCase): dev1", "= Mock(**self.empty_output) obj = ShowPlatformHardwareSerdes(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0')", "class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): def test_golden_serdes(self): self.device = Mock(**self.golden_output_serdes) obj = ShowPlatformHardwareSerdes(device=self.device)", "self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowEnvironment(device=self.dev) parsed_output", "'sn': 'FTX6666ARJ9', 'vid': 'V05 ', }, }, }, 'slot': {", "DESCR: \"X2 Transceiver 10Gbase-SR Te2/1\" PID: X2-10GB-SR , VID: V06", "System Information : ------------------------------ Available system uptime = 0 minutes", "\"1\", DESCR: \"WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 
5.6\"", "None with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(rp='standby', status='running') def test_empty(self): self.device1", "04:07 by prod_rel_team ROM: Bootstrap program is C3750E boot loader", "def test_golden_slot(self): self.device = Mock(**self.golden_output_slot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output =", "Directory of flash:/ '''} golden_parsed_output_iosv = { \"dir\": { \"flash0:/\":", "self.maxDiff = None self.dev = Mock(**self.golden_output_1) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output", "'0': { 'rp': { 'CISCO3825': { 'subslot': { '0': {", "'FOC28476ADM', 'vid': 'V01 ', }, }, }, '16': { 'lc':", "'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk -", "2 hours, 3 minutes System returned to ROM by power-on", "}, } golden_output_iosv = {'execute.return_value': '''\\ NAME: \"IOSv\", DESCR: \"IOSv", "MSFC3 Daughterboard Rev. 3.1\" PID: WS-SUP720 , VID: , SN:", "obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self):", "bytes free) '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) dir_obj =", "1\" PID: FAN-MOD-4HS , VID: V01, SN: DCH170900PF NAME: \"PS", "'FOC09876NP3', 'vid': '', }, }, }, }, } golden_output_7 =", "Rev. 
2.0\", \"name\": \"VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module", "'8 weeks, 3 days, 10 hours, 27 minutes', 'returned_to_rom_by': 'power-on',", "'sn': 'FOC16062824', 'vid': 'V02 ', }, }, }, 'vid': 'V05", "} } }, \"2\": { \"lc\": { \"WS-X6816-10GE\": { \"name\":", "\"five_sec_cpu\": 0.55 }, 3: { \"invoked\": 116196, \"usecs\": 976, \"tty\":", "''' NAME: \"1\", DESCR: \"SM-ES2-16-P\" PID: SM-ES2-16-P , VID: ,", ", SN: FD2043B0K3 NAME: \"Switch 1 - Power Supply 1\",", "{ 'descr': '3825 chassis', 'name': '3825 chassis', 'pid': 'CISCO3825', 'sn':", "= Configuration register is 0x2012 Standby not ready to show", "please contact us by sending email to <EMAIL>. cisco WS-C6503-E", ": 84:3D:C6:FF:F1:B8 Motherboard assembly number : 73-15476-04 Motherboard serial number", "NAME: \"PS 1 PWR-2700-AC/4\", DESCR: \"2700W AC power supply for", "'V05 ', }, }, }, 'slot': { '0': { 'rp':", "Tray 1\" PID: WS-C6503-E-FAN , VID: V02, SN: DCH183500KW NAME:", "SN: FNS153920YJ NAME: \"Transceiver Te2/16\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/16\"", "with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_version_rp(test_show_version_rp_iosxe): def test_golden_active(self): self.device", "Rev. 
2.0\", \"name\": \"msfc sub-module of 1\", \"pid\": \"VS-F6K-MSFC5\", \"sn\":", "= platform_obj.parse(status='active') self.assertEqual(parsed_output, self.golden_parsed_output_active) class test_show_env(test_show_env_iosxe): def test_empty(self): self.dev =", "\"1 day, 16 hours, 42 minutes\" } } golden_output_iosv =", "Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)', 'hostname': 'sample_switch', 'uptime':", "+00:00\", \"index\": \"267\", \"size\": \"147988420\", \"permissions\": \"-rw-\" } }, \"bytes_total\":", "'PWR-3900-AC', 'sn': 'QCS1604P0BT', 'vid': 'V03 ', }, }, }, },", "Transceiver 10Gbase-SR Te2/3\" PID: X2-10GB-SR , VID: V06 , SN:", "\"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS1707008Y\", } } }, \"PS", "1\" PID: PWR-3900-AC , VID: V03 , SN: QCS1604P0BT '''}", "comply with applicable laws and regulations. If you are unable", "SN: FOC28476ADM NAME: \"16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2\",", "V02 , SN: FD232323XXZ NAME: \"GigabitEthernet1/0/49\", DESCR: \"1000BaseSX SFP\" PID:", "'sn': 'FD2043B0K3', 'subslot': { '1': { 'C1010X-STACK': { 'descr': 'Stacking", "2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot", "WS-F6K-DFC4-A , VID: V04, SN: SAL171848KL NAME: \"4\", DESCR: \"WS-X6748-GE-TX", "WS-X6816-10GE , VID: V02, SN: SAL17152QB3 NAME: \"WS-F6K-DFC4-E Distributed Forwarding", "ShowSwitch, ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\\ TestShowPlatformPower as", "version Cisco IOS Software, C3900 Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE", "number : WS-C3750X-24S-E Daughterboard assembly number : 800-32727-03 Daughterboard serial", "Wed 26-Jun-13 09:56 by prod_rel_team Image text-base: 0x00003000, data-base: 0x02800000", "= Mock(**self.empty_output) obj = ShowVersionRp(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse()", "} }, \"1\": { \"X2-10GB-SR\": { \"descr\": 
\"X2 Transceiver 10Gbase-SR", "12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_cat6k = {", "FDO123R12W NAME: \"Switch 1 - Power Supply 1\", DESCR: \"ABC", "{ \"name\": \"2\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet", "0, \"one_min_cpu\": 0.07, \"process\": \"OSPF-1 Hello\", \"five_min_cpu\": 0.07, \"runtime\": 113457,", "Sep 10 2018', 'uptime': '9 weeks, 4 days, 2 hours,", "vios-adventerprisek9-m 268 -rw- 524288 Oct 17 2018 18:57:10 +00:00 nvram", "}, }, 'vid': 'V04 ', }, }, }, }, }", "None self.device = Mock(**self.golden_output_8) obj = ShowInventory(device=self.device) parsed_output = obj.parse()", "test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "{'execute.return_value': ''' # show inventory NAME: \"WS-C6503-E\", DESCR: \"Cisco Systems", "\"FAN-MOD-4HS 1\", \"descr\": \"High Speed Fan Module for CISCO7604 1\",", "def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) with self.assertRaises(SchemaEmptyParserError):", "', }, 'WS-C1010XR-48FPS-I': { 'descr': 'WS-C1010XR-48FPS-I', 'name': '1', 'pid': 'WS-C1010XR-48FPS-I',", "test_golden_active_opm(self): self.device = Mock(**self.golden_output_active_opm) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output = obj.parse(status='active',", "http://www.cisco.com/techsupport Copyright (c) 1986-2017 by Cisco Systems, Inc. 
Compiled Wed", "1.0, SN: 9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2 = { \"main\": { \"chassis\":", "NAME: \"Transceiver Te2/6\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/6\" PID: X2-10GB-SR", "} golden_output_ios_cat6k = {'execute.return_value': ''' show version Cisco Internetwork Operating", "\"X2-10GB-SR\", \"sn\": \"FNS153920YJ\", \"vid\": \"V06 \", } }, \"16\": {", "'FOC63358WSI', 'vid': 'V01 ', }, }, }, }, }, },", "113457 116196 976 0.15% 0.07% 0.07% 0 OSPF-1 Hello '''}", "restarted at 10:27:57 EST Mon Dec 9 2019 System image", ": V03 CLEI Code Number : CMMFF00ARC Hardware Board Revision", "'system_restarted_at': '10:27:57 EST Mon Dec 9 2019', 'uptime': '1 hour,", "Cisco IOSv (revision 1.0) with with 435457K/87040K bytes of memory.", "= Mock(**self.empty_output) obj = ShowEnvironment(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse()", "parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def test_golden_ios_2(self): self.maxDiff = None", "14 2013 00:00:00 +00:00\", \"index\": \"264\", \"size\": \"0\", \"permissions\": \"drw-\"", "available because it is in 'DISABLED' state '''} def test_empty(self):", "def test_golden_output_5(self): self.maxDiff = None self.device = Mock(**self.golden_output_5) obj =", "RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0',", "V03 , SN: SPC1519005V NAME: \"2\", DESCR: \"WS-C3210X-48\" PID: WS-C3210X-48T-S", "'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D', 'vid': 'V01 ', }, }, '1':", "parsed_output, self.golden_parsed_output_slot_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwarePlim(device=self.device1)", "'sn': 'DTN1504L0E9', 'vid': 'V01D ', }, }, '1/1/1': { 'SFP-10G-SR':", "on Slot 1\", DESCR: \"High Density Voice Module - 8FXS/DID\"", "test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdes(device=self.device1) with 
self.assertRaises(SchemaEmptyParserError): parsed_output", "= ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') self.assertEqual(parsed_output,", "1 PWR-1400-AC\", DESCR: \"AC power supply, 1400 watt 1\" PID:", "parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.parsed_output) class test_dir(unittest.TestCase): dev1 = Device(name='empty')", "memory. 8192K bytes of packet buffer memory. 65536K bytes of", "} }, \"4\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR", "Simplex Maintenance Mode = Disabled Communications = Down Reason: Failure", "def test_golden(self): self.device = Mock(**self.golden_output) obj = ShowProcessesCpu(device=self.device) parsed_output =", "= ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_ios(self): self.maxDiff", "SN: SAL1214LAG5 NAME: \"WS-C6503-E-FAN 1\", DESCR: \"Enhanced 3-slot Fan Tray", "interface daughtercard on Slot 1 SubSlot 1\", DESCR: \"Six port", "securityk9 Permanent securityk9 uc None None None data datak9 Permanent", "'C3900 AC Power Supply 1': { 'descr': 'C3900 AC Power", "'NM-1T3/E3=': { 'descr': 'Clear/Subrate T3/E3 WAN', 'name': 'Clear/Subrate T3/E3 WAN", "as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\ ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\\ TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\\ TestShowEnv as", "# show inventory NAME: \"WS-C6503-E\", DESCR: \"Cisco Systems Catalyst 6500", "\"subslot\": { \"0\": { \"VS-F6K-MSFC5\": { \"descr\": \"VS-F6K-MSFC5 CPU Daughterboard", "'V01 ', }, }, }, '16': { 'lc': { 'NM-16ESW':", "ISR on Slot 0', 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6', 'subslot': {", "Virtual Private Network (VPN) Module DRAM configuration is 72 bits", "'vid': 'V09 ', 
}, }, 'other': { 'AIM-VPN/SSL-3': { 'descr':", "} golden_output_2 = {'execute.return_value': ''' NAME: \"WS-C6504-E\", DESCR: \"Cisco Systems", "\"LLL Power Supply\" PID: PWR-C2-2929WAC , VID: V02L , SN:", "'chassis': { 'CISCO3825': { 'descr': '3825 chassis', 'name': '3825 chassis',", "'1': { 'EM-HDA-6FXO': { 'descr': 'Six port FXO voice interface", "= {'execute.return_value': ''} golden_parsed_output_iosv = { \"active\": { \"boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\",", "W0 Motherboard revision number : B0 Model number : WS-C3750X-24P-L", "- 2-Port RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 -", "\"red_sys_info\": { \"last_switchover_reason\": \"unsupported\", \"maint_mode\": \"Disabled\", \"switchovers_system_experienced\": \"0\", \"available_system_uptime\": \"0", "platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active',", "'1': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex", "{ 'descr': 'Stacking Module', 'name': 'Switch 1 - FlexStackPlus Module',", "test_empty(self): self.device1 = Mock(**self.empty_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "SubSlot 0\", DESCR: \"Gigabit(1000BaseT) module for EtherSwitch NM\" PID: GE-DCARD-ESW", "15 minutes System returned to ROM by power cycle at", "2\", DESCR: \"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2\" PID:", "= platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch_detail(test_show_switch_detail_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output)", "{ \"WS-SUP720\": { \"descr\": \"WS-SUP720 MSFC3 Daughterboard Rev. 
3.1\", \"name\":", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3) def test_golden_output_4(self): self.maxDiff = None", "def test_golden_ios_1(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_1) version_obj =", "{ 'PVDM2-64': { 'descr': 'PVDMII DSP SIMM with four DSPs',", "SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2013 by Cisco", "1 drw- 0 Jan 30 2013 00:00:00 +00:00 boot 264", "'descr': 'SM-ES2-16-P', 'name': '1', 'pid': 'SM-ES2-16-P', 'sn': 'FOC09876NP3', 'vid': '',", "\"FAN-MOD-4HS\", \"vid\": \"V01\", \"sn\": \"DCH170900PF\", } } }, \"PS 1", "chassis', 'name': '3825 chassis', 'pid': 'CISCO3825', 'sn': 'FTX7908A3RQ', 'vid': 'V05", "}, }, }, } golden_output_6 = {'execute.return_value': ''' NAME: \"1\",", "}, }, }, }, } golden_output_6 = {'execute.return_value': ''' NAME:", "}, }, 'slot': { '0': { 'rp': { 'CISCO3825': {", "} }, \"PS 2 PWR-2700-AC/4\": { \"other\": { \"PS 2", "SN: SAL171848KL NAME: \"4\", DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb", "'power cycle', \"rtr_type\": \"WS-C6503-E\", \"chassis_sn\": \"FXS1821Q2H9\", \"last_reload_reason\": \"s/w reset\", 'processor_board_flash':", "RELEASE SOFTWARE (fc1)', 'rtr_type': 'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57 EST", "Centralized Forwarding Card EARL sub-module of 4\", DESCR: \"WS-F6700-CFC Centralized", "\"drw-\" }, \"vios-adventerprisek9-m\": { \"last_modified_date\": \"Mar 29 2017 00:00:00 +00:00\",", "None self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.device =", "is C3750E boot loader', 'rtr_type': 'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21", "Transceiver 10Gbase-SR Te2/3\", \"name\": \"Transceiver Te2/3\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202UU\",", "PID: VWIC2-2MFT-T1/E1 , VID: V01 , 
SN: FOC65428K9F NAME: \"Wan", "chassis', 'name': '3845 chassis', 'pid': 'CISCO3845', 'sn': 'FTX6666ARJ9', 'vid': 'V05", "parsered_output = dir_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) dir_obj =", "self.assertEqual(parsed_output, self.golden_parsed_output_ios) def test_golden_ios_cat6k(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_cat6k)", "Ethernet on Slot 0\", DESCR: \"c3845 Motherboard with Gigabit Ethernet\"", "0K bytes of ATA CompactFlash 2 (Read/Write) 10080K bytes of", "interfaces 3 Gigabit Ethernet interfaces 1 Virtual Private Network (VPN)", "= dir_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase): dev1 = Device(name='empty') dev_iosv", "2000880K bytes of ATA System CompactFlash 0 (Read/Write) License Info:", "C3750E Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE SOFTWARE (fc1) Technical Support:", "on Slot 0 SubSlot 4\", DESCR: \"PVDMII DSP SIMM with", "SN: FTX6666ARJ9 NAME: \"c3845 Motherboard with Gigabit Ethernet on Slot", "{ \"chassis\": { \"WS-C6504-E\": { \"name\": \"WS-C6504-E\", \"descr\": \"Cisco Systems", "LIT03728KKK NAME: \"Switch 1 - FlexStackPlus Module\", DESCR: \"Stacking Module\"", "\"X2 Transceiver 10Gbase-SR Te2/4\" PID: X2-10GB-SR , VID: V06 ,", "test_golden_active(self): self.maxDiff = None self.device = Mock(**self.golden_output_active) platform_obj = ShowPlatformHardwareQfpStatisticsDrop(", ", SN: LIT03728KKK NAME: \"Switch 1 - FlexStackPlus Module\", DESCR:", "EARL sub-module of 1\", DESCR: \"VS-F6K-PFC4 Policy Feature Card 4", ", VID: V06, SN: SAL13516QS8 NAME: \"FAN-MOD-4HS 1\", DESCR: \"High", "\"communications_reason\": \"Failure\", \"standby_failures\": \"0\" }, \"slot\": { \"slot 0\": {", "\"name\": \"WS-C6504-E\", \"descr\": \"Cisco Systems Cisco 6500 4-slot Chassis System\",", "platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_iosv = 
Mock(**self.golden_output_iosv) platform_obj", "self.device = Mock(**self.golden_output_serdes) obj = ShowPlatformHardwareSerdes(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff", "\"descr\": \"WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6\", \"pid\":", "FOC135464KO NAME: \"Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2", "None None None data datak9 Permanent datak9 Configuration register is", "\"name\": \"Transceiver Te2/3\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202UU\", \"vid\": \"V06 \",", "System returned to ROM by power-on System restarted at 12:22:21", "s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) cat6k_tb1 uptime", "SOFTWARE (fc1) cat6k_tb1 uptime is 10 weeks, 5 days, 5", ", VID: V03 , SN: FOC85389QXB '''} golden_parsed_output_8 = {", "\"\", \"sn\": \"FXS181101V4\", } } }, \"1\": { \"rp\": {", "self.golden_parsed_output_slot_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwarePlim(device=self.device1) with", "= { \"red_sys_info\": { \"last_switchover_reason\": \"unsupported\", \"maint_mode\": \"Disabled\", \"switchovers_system_experienced\": \"0\",", "= ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output,", "version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_ios(self):", "obj.parse() class test_show_platform(test_show_platform_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj =", "self.device = Mock(**self.golden_output_slot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff", "software (copyright 1990 by Meridian Technology Corp). 
X.25 software, Version", "self.device = Mock(**self.golden_output_8) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,", "DCH170900PF NAME: \"PS 1 PWR-2700-AC/4\", DESCR: \"2700W AC power supply", "of flash:/ '''} golden_parsed_output_iosv = { \"dir\": { \"flash0:/\": {", "Daughterboard assembly number : 800-32727-03 Daughterboard serial number : FDO202823P8", "local country laws. By using this product you agree to", "RELEASE SOFTWARE (fc1) sample_switch uptime is 8 weeks, 3 days,", "\"Transceiver Te1/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/5\" PID: X2-10GB-SR ,", "Motherboard with Gigabit Ethernet on Slot 0\", DESCR: \"c3845 Motherboard", "FastEthernet interfaces 3 Gigabit Ethernet interfaces 1 Virtual Private Network", "'slot': { '1': { 'rp': { 'WS-C0123X-45T-S': { 'descr': 'WS-C8888X-88',", "'SM-ES2-16-P', 'sn': 'FOC09876NP3', 'vid': '', }, }, }, }, }", "VID: V01, SN: 10293847 NAME: \"PVDMII DSP SIMM with four", "Mock(**self.golden_output_1) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1) class", "Mock(**self.empty_output) platform_obj = ShowProcessesCpuHistory(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def", "{ 'AIM-VPN/SSL-2': { 'descr': 'Encryption AIM Element', 'name': 'Virtual Private", "}, \"WS-C6503-E-FAN 1\": { \"other\": { \"WS-C6503-E-FAN 1\": { \"name\":", "daughtercard on Slot 1 SubSlot 1', 'pid': 'EM-HDA-6FXO', 'sn': 'FOC85389QXB',", "'vid': 'V01', 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1", "Supply 1', 'pid': 'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9', 'vid': 'V01D ', },", "None self.assertEqual(parsed_output, self.golden_parsed_output) def test_empty(self): self.device1 = Mock(**self.empty_output) cpu_platform_obj =", "\"1\", \"descr\": \"VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/", "Te2/1\", DESCR: \"X2 Transceiver 
10Gbase-SR Te2/1\" PID: X2-10GB-SR , VID:", "Card 4 EARL sub-module of 1\", DESCR: \"VS-F6K-PFC4 Policy Feature", "0', 'pid': 'CISCO3845-MB', 'sn': 'FOC729346GQ', 'vid': 'V09 ', }, },", "\"usecs\": 976, \"tty\": 0, \"one_min_cpu\": 0.07, \"process\": \"OSPF-1 Hello\", \"five_min_cpu\":", "self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch(test_show_switch_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj =", "\"SAL13516QS8\", \"vid\": \"V06\", } } }, } } }, },", "Engine 720 Rev. 5.6\", \"pid\": \"WS-SUP720-3BXL\", \"vid\": \"V05\", \"sn\": \"SAL11434P2C\",", "Mock(**self.empty_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def", "obj.parse(subslot='0/1') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_subslot) def test_golden_slot_internal(self): self.device =", "def test_golden_serdes(self): self.device = Mock(**self.golden_output_serdes) obj = ShowPlatformHardwareSerdes(device=self.device) parsed_output =", ", SN: ONT1702033D NAME: \"2\", DESCR: \"WS-X6816-10GE CEF720 16 port", "'''\\ Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE", "Version 15.2(3r)E, RELEASE SOFTWARE (fc1) R5 uptime is 9 weeks,", "number : 73-15476-04 Motherboard serial number : FDO202907UH Model revision", "PID: WS-X6824-SFP , VID: V01, SN: SAL17152EG9 NAME: \"WS-F6K-DFC4-A Distributed", "3321960, \"usecs\": 109, \"tty\": 0, \"one_min_cpu\": 0.54, \"process\": \"PIM Process\",", "number : FDO172217ED System serial number : FDO1633Q14S Top Assembly", "\"V06 \", } }, \"4\": { \"X2-10GB-SR\": { \"descr\": \"X2", "test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowProcessesCpuSorted(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output", "Information : ------------------------------ Available system uptime = 0 minutes 
Switchovers", "from genie.libs.parser.ios.show_platform import ShowVersion,\\ Dir,\\ ShowRedundancy,\\ ShowInventory,\\ ShowBootvar, \\ ShowProcessesCpuSorted,\\", "def test_golden(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) platform_obj =", "def test_golden_slot_internal(self): self.device = Mock(**self.golden_output_slot_internal) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output =", "}, }, }, '1': { 'lc': { 'NM-1T3/E3=': { 'descr':", "None self.dev_iosv = Mock(**self.golden_output_iosv) platform_obj = ShowBootvar(device=self.dev_iosv) parsed_output = platform_obj.parse()", "9K66Z7TOKAACDEQA24N7S 6 Gigabit Ethernet interfaces DRAM configuration is 72 bits", "'Permanent', 'next_reload_license_level': 'datak9', }, 'ipbase': { 'license_level': 'ipbasek9', 'license_type': 'Permanent',", ", SN: ONT170202UU NAME: \"Transceiver Te2/4\", DESCR: \"X2 Transceiver 10Gbase-SR", "\"1000BaseSX SFP\" PID: GLC-SX-MMD , VID: V01 , SN: ACW102938VS", "AC Power Supply 1', 'pid': 'PWR-3900-AC', 'sn': 'QCS1604P0BT', 'vid': 'V03", "CPU Daughterboard Rev. 2.0\", \"name\": \"msfc sub-module of 1\", \"pid\":", "V06 , SN: ONT17020338 NAME: \"Transceiver Te2/2\", DESCR: \"X2 Transceiver", "of 1\", DESCR: \"VS-F6K-MSFC5 CPU Daughterboard Rev. 
2.0\" PID: VS-F6K-MSFC5", "28 2010 (SP by power on) System image file is", "golden_parsed_output_9 = { 'main': { 'chassis': { 'CISCO3845': { 'descr':", "'Virtual Private Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-3',", "Te2/6\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/6\" PID: X2-10GB-SR , VID:", "FRU 1\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", } }", "}, }, 'slot': { '0': { 'other': { 'AIM-VPN/SSL-2': {", "\"other\": { \"CLK-7600 1\": { \"name\": \"CLK-7600 1\", \"descr\": \"OSR-7600", "SIMM with four DSPs', 'name': 'PVDMII DSP SIMM with four", "\"pid\": \"WS-C6503-E\", \"vid\": \"V03\", \"sn\": \"FXS1821Q2H9\", } } }, \"slot\":", "}, }, '2/1/1': { 'SFP-10G-LR': { 'descr': 'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1',", "System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1) BOOTLDR: s72033_rp Software", "Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1) sample_switch uptime is", "'next_reload_license_level': 'ipservices', 'number_of_intfs': {'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2',", "1 30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''}", "= ShowPlatform(device=self.dev_asr1k) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_asr1k) class test_show_platform_power(test_show_platform_power_iosxe): def", "\"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"4\", \"descr\": \"WS-X6748-GE-TX CEF720 48", "Voice Module - 8FXS/DID', 'name': 'High Density Voice Module -", "\"X2 Transceiver 10Gbase-SR Te2/1\", \"name\": \"Transceiver Te2/1\", \"pid\": \"X2-10GB-SR\", \"sn\":", "CISCO7604 2\" PID: PWR-2700-AC/4 , VID: V03, SN: APS17070093 '''}", "Fan Tray 1\" PID: WS-C6503-E-FAN , VID: V02, SN: DCH183500KW", "= None self.device = Mock(**self.golden_output_9) obj = ShowInventory(device=self.device) parsed_output =", "PID: CLK-7600 , VID: , SN: FXS181101V4 NAME: \"1\", DESCR:", ": 800-33746-04 Top Assembly Revision Number : B0 
Version ID", "'''\\ ROM: Bootstrap program is IOSv '''} golden_parsed_output_iosv = {", "10Gbase-SR Te2/1\", \"name\": \"Transceiver Te2/1\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT17020338\", \"vid\":", "\"process\": \"IOSv e1000\", \"five_min_cpu\": 2.77, \"runtime\": 3582279, \"pid\": 84, \"five_sec_cpu\":", ", VID: V02, SN: SAL171846RF NAME: \"Transceiver Te2/1\", DESCR: \"X2", "'name': 'Six port FXO voice interface daughtercard on Slot 1", "{ 'main': { 'chassis': { 'CISCO2821': { 'descr': '2821 chassis',", "16 minutes\", \"system_image\": \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\", \"chassis\": \"WS-C6503-E\", \"main_mem\": \"983008\", \"processor_type\": \"R7000\",", "2\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", } } },", "{ 'version': {'version_short': '15.2', 'platform': 'C3750E', 'version': '15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M',", "as test_show_platform_hardware_qfp_statistics_drop_iosxe,\\ TestShowEnv as test_show_env_iosxe,\\ TestShowModule as test_show_module_iosxe,\\ TestShowSwitch as", "Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2) Technical", "processes cpu sorted 5min | inc CPU CPU utilization for", "\"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1\" PID: VWIC2-2MFT-T1/E1", "does not imply third-party authority to import, export, distribute or", "= Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE", "ONT182746GZ NAME: \"1\", DESCR: \"WS-C1010XR-48FPS-I\" PID: WS-C1010XR-48FPS-I, VID: V05 ,", "Mock(**self.empty_output) obj = ShowProcessesCpu(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class", "'pid': 'CISCO3945-CHASSIS', 'sn': 'FGL161010K8', 'vid': 'V05 ', }, }, },", "= Mock(**self.golden_output_c3850) platform_obj = ShowPlatform(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_c3850)", "def 
test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with self.assertRaises(SchemaEmptyParserError):", "\"X2 Transceiver 10Gbase-SR Te2/16\", \"name\": \"Transceiver Te2/16\", \"pid\": \"X2-10GB-SR\", \"sn\":", "def test_golden_standby_offline(self): self.device = Mock(**self.golden_output_standby_offline) obj = ShowVersionRp(device=self.device) self.maxDiff =", "\"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 2.6\", \"pid\":", "2 - Power Supply 1', 'pid': 'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R', 'vid':", "------------------------------------------------- *0 C3900-SPE150/K9 FOC16050QP6 Technology Package License Information for Module:'c3900'", "FOC85389QXB '''} golden_parsed_output_8 = { 'main': { 'chassis': { 'CISCO3825':", "{ 1: { \"invoked\": 3321960, \"usecs\": 109, \"tty\": 0, \"one_min_cpu\":", "0 Current Software state = ACTIVE Uptime in current state", "} } }, \"PS 1 PWR-2700-AC/4\": { \"other\": { \"PS", "1.2\", \"name\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of", "} golden_output_6 = {'execute.return_value': ''' NAME: \"1\", DESCR: \"SM-ES2-16-P\" PID:", "self.maxDiff = None self.device = Mock(**self.golden_output_6) obj = ShowInventory(device=self.device) parsed_output", "}, '1': { 'lc': { 'NM-1T3/E3=': { 'descr': 'Clear/Subrate T3/E3", "5 hours, 15 minutes System returned to ROM by power", "ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self): self.maxDiff =", "voice interface daughtercard', 'name': 'Six port FXO voice interface daughtercard", "System returned to ROM by power cycle at 21:57:23 UTC", "\"vid\": \"V05 \", } }, \"6\": { \"X2-10GB-SR\": { \"descr\":", "power supply for CISCO7604 1\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\":", "VID: V07 , SN: FTX1234AMWT NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48", 
"\"VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS Rev.", "3.4\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V04\", \"sn\": \"SAL14017TWF\", \"subslot\": { \"0\":", "obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class", "'returned_to_rom_at': '10:26:47 EST Mon Dec 9 2019', 'returned_to_rom_by': 'reload', 'rom':", "3 (Read/Write) Configuration register is 0x0'''} golden_parsed_output_ios = { 'version':", "{ 'descr': 'ABC Power Supply', 'name': 'Switch 1 - Power", "'pid': 'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8', 'subslot': { '1': { 'EM-HDA-6FXO': {", "'image_id': 'C3900-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'Reload Command', 'last_reload_type': 'Normal", "}, \"3\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/3\",", "= version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self): self.maxDiff = None self.dev_iosv", "Hardware Mode = Simplex Maintenance Mode = Disabled Communications =", "T3/E3 WAN on Slot 1', 'pid': 'NM-1T3/E3=', 'sn': 'FOC28476ADM', 'vid':", "Slot 0 SubSlot 1', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E', 'vid': 'V01", "= ShowPlatformHardwarePlim(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(port='0/0/0') class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def", "ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware(test_show_platform_hardware_iosxe): def test_golden_active(self):", "DESCR: \"2700W AC power supply for CISCO7604 2\" PID: PWR-2700-AC/4", "'GE-DCARD-ESW', 'sn': 'FOC91864MNN', 'vid': 'V01 ', }, }, }, 'vid':", "\"0\", \"available_system_uptime\": \"0 minutes\", \"communications\": 
\"Down\", \"hw_mode\": \"Simplex\", \"communications_reason\": \"Failure\",", "\"VS-F6K-MSFC5\": { \"descr\": \"VS-F6K-MSFC5 CPU Daughterboard Rev. 2.0\", \"name\": \"msfc", "and local country laws. By using this product you agree", "}, }, }, }, } golden_output_7 = {'execute.return_value': ''' NAME:", "\"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202UU\", \"vid\": \"V06 \", } }, \"4\":", "None self.dev_iosv = Mock(**self.golden_output_iosv) dir_obj = Dir(device=self.dev_iosv) parsed_output = dir_obj.parse()", "Module - 8FXS/DID\" PID: EVM-HD-8FXS/DID , VID: V04 , SN:", "4', 'pid': 'PVDM2-64', 'sn': 'FOC63358WSI', 'vid': 'V01 ', }, },", "\"WS-C6503-E\", \"main_mem\": \"983008\", \"processor_type\": \"R7000\", 'sp_by': 'power on', 'returned_to_rom_at': '21:57:23", "Card 3 Rev. 1.1\" PID: WS-F6700-DFC3CXL , VID: V01, SN:", "\"sn\": \"FXS1712Q1R8\", } } }, \"slot\": { \"CLK-7600 1\": {", "self.assertRaises(AttributeError): parsered_output = version_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) version_obj", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_serdes) def test_empty(self): self.device1 = Mock(**self.empty_output)", "0x0'''} golden_parsed_output_ios = { 'version': {'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M)", "Interface Card on Slot 0 SubSlot 3', 'pid': 'HWIC-2FE', 'sn':", "Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport", "Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0')", "10Gbase-SR Te2/3\", \"name\": \"Transceiver Te2/3\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202UU\", \"vid\":", "SN: FOC758693YO NAME: \"Clear/Subrate T3/E3 WAN on Slot 1\", DESCR:", "0 Standby failures = 0 Last switchover reason = unsupported", "Card on Slot 0 SubSlot 3\", DESCR: \"Two-Port Fast 
Ethernet", "for five seconds: 13%/0%; one minute: 23%; five minutes: 15%", "<EMAIL>. Cisco IOSv (revision 1.0) with with 435457K/87040K bytes of", "}, } golden_output_2 = {'execute.return_value': ''' NAME: \"WS-C6504-E\", DESCR: \"Cisco", "as show_platform_hardware_qfp_interface_iosxe,\\ TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\\ TestShowEnv as test_show_env_iosxe,\\ TestShowModule as", "FRU 1\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", } }", "self.golden_parsed_output_ios) def test_golden_ios_cat6k(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_cat6k) version_obj", "V02, SN: SAL17152QB3 NAME: \"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL", "chassis', 'pid': 'CISCO2821', 'sn': 'FTX1234AMWT', 'vid': 'V07 ', }, },", "\"High Speed Fan Module for CISCO7604 1\" PID: FAN-MOD-4HS ,", "None self.device = Mock(**self.golden_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) parsed_output =", "Code Number : CMMFF00ARC Hardware Board Revision Number : 0x04", "None self.assertEqual(parsed_output, self.golden_parsed_output_standby) def test_golden_standby_offline(self): self.device = Mock(**self.golden_output_standby_offline) obj =", "'''\\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE", "{'execute.return_value': ''' CPU utilization for five seconds: 4%/0%; one minute:", "Motherboard with Gigabit Ethernet\" PID: CISCO3845-MB , VID: V09 ,", ", SN: AGA1515XZE2 NAME: \"Transceiver Te2/6\", DESCR: \"X2 Transceiver 10Gbase-SR", "'C3KX-PWR-007CBA': { 'descr': 'BCA Power Supply', 'name': 'Switch 2 -", "reason This product contains cryptographic features and is subject to", "NAME: \"2821 chassis\", DESCR: \"2821 chassis\" PID: CISCO2821 , VID:", "CPU utilization for five seconds: 13%/0%; one minute: 23%; five", "five seconds: 4%/0%; one minute: 4%; five minutes: 9% PID", "PID: WS-X6748-GE-TX 
, VID: V04, SN: SAL14017TWF NAME: \"WS-F6700-CFC Centralized", "NAME: \"1\", DESCR: \"WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev.", "Module for CISCO7604 1\" PID: FAN-MOD-4HS , VID: V01, SN:", "0 (Read/Write) 0K bytes of ATA CompactFlash 1 (Read/Write) 0K", "device=self.device) parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') self.assertEqual(parsed_output, self.golden_parsed_output) class", "Cisco Internetwork Operating System Software IOS (tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M),", "ShowPlatformHardwareSerdesInternal,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\ ShowPlatformHardwareQfpStatisticsDrop,\\ ShowEnvironment,\\ ShowModule,\\ ShowSwitch, ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform", "BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) cat6k_tb1", "VID: V09 , SN: FOC729346GQ NAME: \"Virtual Private Network (VPN)", "Process 84 3582279 1466728 2442 0.55% 0.87% 2.77% 0 IOSv", "'1/0/49': { 'GLC-SX-MMD': { 'descr': '1000BaseSX SFP', 'name': 'GigabitEthernet1/0/49', 'pid':", "PID: GLC-SX-MMD , VID: V01 , SN: ACW102938VS '''} golden_parsed_output_4", "SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2006 by cisco", "3 Gigabit Ethernet interfaces 1 Virtual Private Network (VPN) Module", "} golden_output_iosv = {'execute.return_value': '''\\ Directory of flash0:/ 1 drw-", "{'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version ' '15.2(3r)E, RELEASE SOFTWARE", "Policy Feature Card 4 Rev. 
2.0\", \"name\": \"VS-F6K-PFC4 Policy Feature", "None self.device = Mock(**self.golden_output_active_ipm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output =", "U (2091, 3086) on Slot 0 SubSlot 1\", DESCR: \"Wan", "port FXO voice interface daughtercard', 'name': 'Six port FXO voice", "class TestShowInventory(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output =", "T3/E3 WAN\" PID: NM-1T3/E3= , VID: V01 , SN: FOC28476ADM", "with self.assertRaises(SchemaEmptyParserError): parsed_output = cpu_platform_obj.parse() class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def test_empty(self): self.dev", ", VID: V05, SN: SAL11434P2C NAME: \"msfc sub-module of 1\",", "} } }, } } }, }, } golden_output_2 =", "Permanent Next reload license Level: ipservices cisco WS-C3750X-24S (PowerPC405) processor", "{ '3': { 'HWIC-2FE': { 'descr': 'Two-Port Fast Ethernet High", "None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output = obj.parse()", "}, } def test_empty(self): self.dev1 = Mock(**self.empty_output) version_obj = ShowVersion(device=self.dev1)", "\"WS-X6748-GE-TX\": { \"name\": \"4\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb", "'chassis_sn': 'FDO1633Q14S', 'number_of_intfs': { 'Virtual Ethernet': '14', 'FastEthernet': '1', 'Gigabit", "{ '2': { 'C3KX-PWR-007CBA': { 'descr': 'BCA Power Supply', 'name':", ", VID: V04 , SN: FOC65798TG8 NAME: \"Six port FXO", "as test_show_processes_cpu_history_iosxe,\\ TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\\ TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as", "'sn': '9K66Z7TOKAACDEQA24N7S', 'vid': '1.0', }, }, }, } golden_output_iosv =", "of ATA System 
CompactFlash 0 (Read/Write) 0K bytes of ATA", "Module for CISCO7604 1\", \"pid\": \"FAN-MOD-4HS\", \"vid\": \"V01\", \"sn\": \"DCH170900PF\",", "status='active', interface='gigabitEthernet 0/0/0') self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def test_empty(self): self.device", "{ 'NM-16ESW': { 'descr': '16 Port 10BaseT/100BaseTX EtherSwitch', 'name': '16", "\"five_sec_cpu_interrupts\": 0 } golden_output = {'execute.return_value': '''\\ show processes cpu", "SN: FOC98675W3E NAME: \"Virtual Private Network (VPN) Module on Slot", "test_show_module(test_show_module_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowModule(device=self.dev1) with", "is 72 bits wide with parity disabled. 256K bytes of", "0/0/0') self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def test_empty(self): self.device = Mock(**self.empty_output)", "country laws governing import, export, transfer and use. Delivery of", "of memory. Processor board ID FGL161010K8 2 FastEthernet interfaces 3", "\"c3845 Motherboard with Gigabit Ethernet on Slot 0\", DESCR: \"c3845", "5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5\",", "platform_obj = ShowPlatform(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_c3850) def test_golden_asr1k(self):", "\"SFP-10GBase-LR\" PID: SFP-10G-LR , VID: V02 , SN: ONT182746GZ NAME:", "parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_ios(self): self.maxDiff = None", "\"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 
1.1\", \"name\": \"switching engine", "'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR', 'sn': 'ONT182746GZ', 'vid': 'V02 ',", "def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdesInternal(device=self.device1) with self.assertRaises(SchemaEmptyParserError):", "Top Assembly Part Number : 800-33746-04 Top Assembly Revision Number", "'returned_to_rom_by': 'reload', 'rom': 'System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)',", ": ------------------------------- Active Location = slot 0 Current Software state", "= Mock(**self.empty_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse()", "obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6) def test_golden_output_7(self):", "'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Fri 05-Aug-11 00:32', 'curr_config_register':", "{ \"last_modified_date\": \"Jan 30 2013 00:00:00 +00:00\", \"index\": \"1\", \"size\":", "Uptime in current state = 1 day, 16 hours, 42", "= None self.dev_asr1k = Mock(**self.golden_output_asr1k) platform_obj = ShowPlatform(device=self.dev_asr1k) parsed_output =", "Interface Card BRI U (2091, 3086)', 'name': 'Wan Interface Card", "self.assertEqual( parsed_output, self.golden_parsed_output_slot_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj =", "of 4\", DESCR: \"WS-F6700-CFC Centralized Forwarding Card Rev. 4.1\" PID:", "\"rp\": { \"WS-SUP720-3BXL\": { \"name\": \"1\", \"descr\": \"WS-SUP720-3BXL 2 ports", "1.1) with C3900-SPE150/K9 with 2027520K/69632K bytes of memory. 
Processor board", "V00 , SN: FDO123R12W NAME: \"Switch 1 - Power Supply", "daughtercard on Slot 1 SubSlot 1\", DESCR: \"Six port FXO", "= None self.device = Mock(**self.golden_output) platform_obj = ShowProcessesCpuHistory(device=self.device) parsed_output =", "'12.2' } } golden_output_ios = {'execute.return_value': '''\\ Cisco IOS Software,", "prod_rel_team Configuration register = 0x0 Peer (slot: 0) information is", "revision number : A0 Motherboard revision number : A0 Model", "}, 'license_package': { 'data': { 'license_level': 'datak9', 'license_type': 'Permanent', 'next_reload_license_level':", "Te2/3\", \"name\": \"Transceiver Te2/3\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202UU\", \"vid\": \"V06", "----- ----- ---------- ---------- * 1 30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M", "weeks, 5 days, 5 hours, 16 minutes\", \"system_image\": \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\", \"chassis\":", "test_golden_active_ipm(self): self.device = Mock(**self.golden_output_active_ipm) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output = obj.parse(status='active',", "\", } }, \"5\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver", "\"Transceiver Te2/2\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702020H\", \"vid\": \"V06 \", }", "chassis\", DESCR: \"2821 chassis\" PID: CISCO2821 , VID: V07 ,", "2 SubSlot 0\", DESCR: \"Gigabit(1000BaseT) module for EtherSwitch NM\" PID:", "TTY Process 368 362874 3321960 109 1.03% 0.54% 0.48% 0", "\"-rw-\" }, \"config\": { \"last_modified_date\": \"Oct 14 2013 00:00:00 +00:00\",", "{ 'CISCO2821': { 'descr': '2821 chassis', 'name': '2821 chassis', 'pid':", "interfaces 1 FastEthernet interface 28 Gigabit Ethernet interfaces 2 Ten", "ROM: Bootstrap program is IOSv N95_1 uptime is 1 day,", "1\": { \"other\": { \"CLK-7600 1\": { \"name\": \"CLK-7600 1\",", "SN: SAL14017TWF NAME: \"WS-F6700-CFC Centralized Forwarding Card EARL sub-module of", "unsupported Hardware Mode = Simplex Maintenance Mode = 
Disabled Communications", "with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_semi_empty(self): self.dev2 = Mock(**self.semi_empty_output)", "Permanent securityk9 uc None None None data datak9 Permanent datak9", "'Bootstrap program is C3750E boot loader', 'bootldr': 'C3750E Boot Loader", "EARL sub-module of 3\", DESCR: \"WS-F6K-DFC4-A Distributed Forwarding Card 4", "golden_output_3 = {'execute.return_value': ''' # show inventory NAME: \"WS-C6503-E\", DESCR:", "version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios) def test_golden_ios_cat6k(self): self.maxDiff = None self.dev_iosv =", "}, \"next_reload_boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\" } golden_output_iosv = {'execute.return_value': '''\\ BOOT variable", "3 Rev. 1.1\" PID: WS-F6700-DFC3CXL , VID: V01, SN: SAL1214LAG5", "''' NAME: \"WS-C6504-E\", DESCR: \"Cisco Systems Cisco 6500 4-slot Chassis", "}, '1/0/49': { 'GLC-SX-MMD': { 'descr': '1000BaseSX SFP', 'name': 'GigabitEthernet1/0/49',", "} } }, } } }, \"WS-C6503-E-FAN 1\": { \"other\":", "EVM-HD-8FXS/DID , VID: V04 , SN: FOC65798TG8 NAME: \"Six port", "\", } }, \"16\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver", ", VID: V01, SN: SAL17152EG9 NAME: \"WS-F6K-DFC4-A Distributed Forwarding Card", "= obj.parse(subslot='0/1') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_subslot) def test_golden_slot_internal(self): self.device", "9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0\" PID: IOSv , VID: 1.0, SN:", "Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(status='active')", "platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') self.assertEqual(parsed_output, self.golden_parsed_output) class 
test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def test_empty(self):", "bytes of non-volatile configuration memory. 2097152K bytes of ATA System", "test_golden_output_7(self): self.maxDiff = None self.device = Mock(**self.golden_output_7) obj = ShowInventory(device=self.device)", "0 SubSlot 1\", DESCR: \"Wan Interface Card BRI U (2091,", "'Fri 05-Aug-11 00:32', 'curr_config_register': '0x2102', 'hostname': 'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M', 'image_type':", "Card 4 EARL sub-module of 3\", DESCR: \"WS-F6K-DFC4-A Distributed Forwarding", "EtherSwitch NM on Slot 2 SubSlot 0', 'pid': 'GE-DCARD-ESW', 'sn':", "power on) System image file is \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\" This product contains", "obj.parse(port='0/0/0') class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def test_golden_active_opm(self): self.device = Mock(**self.golden_output_active_opm) obj =", "Centralized Forwarding Card EARL sub-module of 4\", \"pid\": \"WS-F6700-CFC\", \"sn\":", "uptime is 10 weeks, 5 days, 5 hours, 16 minutes", "Rev. 1.2\" PID: WS-F6K-DFC4-E , VID: V02, SN: SAL171846RF NAME:", "Support: http://www.cisco.com/techsupport Copyright (c) 1986-2006 by cisco Systems, Inc. 
Compiled", ": FDO1633Q14M Model revision number : A0 Motherboard revision number", "\"vid\": \"V01\", \"sn\": \"ABC0830J127\", } } }, }, } golden_output_3", "version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self):", "V06 , SN: FNS153920YJ NAME: \"Transceiver Te2/16\", DESCR: \"X2 Transceiver", "parsed_output = redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase): dev1 = Device(name='empty')", "self.assertEqual(parsed_output, self.golden_parsed_output_2) def test_golden_output_3(self): self.maxDiff = None self.device = Mock(**self.golden_output_3)", "4.1\" PID: WS-F6700-CFC , VID: V06, SN: SAL13516QS8 NAME: \"FAN-MOD-4HS", "Gigabit Ethernet\" PID: CISCO3845-MB , VID: V09 , SN: FOC729346GQ", "golden_parsed_output_5 = { 'main': { 'chassis': { 'CISCO3945-CHASSIS': { 'descr':", "23-Nov-06 06:26', \"image_type\": \"production image\", \"rom\": \"System Bootstrap, Version 12.2(17r)S4,", "\"index\": \"269\", \"size\": \"119\", \"permissions\": \"-rw-\" }, \"config\": { \"last_modified_date\":", "'Gigabit Ethernet': '3', }, 'os': 'IOS', 'platform': 'C3900', 'processor_board_flash': '2000880K',", "of flash0:/ 1 drw- 0 Jan 30 2013 00:00:00 +00:00", "SN ------------------------------------------------- *0 C3900-SPE150/K9 FOC16050QP6 Technology Package License Information for", "weeks, 3 days, 10 hours, 27 minutes', 'returned_to_rom_by': 'power-on', 'system_restarted_at':", "} }, \"1\": { \"rp\": { \"VS-SUP2T-10G\": { \"name\": \"1\",", "} def test_empty(self): self.dev1 = Mock(**self.empty_output) version_obj = ShowVersion(device=self.dev1) with", "0x05 Switch Ports Model SW Version SW Image ------ -----", "parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_iosv =", "'sp_by': 'power on', 'returned_to_rom_at': '21:57:23 UTC Sat Aug 28 
2010',", ": 800-38990-01 Top Assembly Revision Number : F0 Version ID", "3086)\" PID: WIC-1B-U-V2 , VID: V01, SN: 10293847 NAME: \"PVDMII", "WS-X6824-SFP , VID: V01, SN: SAL17152EG9 NAME: \"WS-F6K-DFC4-A Distributed Forwarding", "\"Stacking Module\" PID: C1010X-STACK , VID: V02 , SN: FD232323XXZ", "V05 , SN: AGA1515XZE2 NAME: \"Transceiver Te2/6\", DESCR: \"X2 Transceiver", "'name': '2', 'pid': 'WS-C3210X-48T-S', 'sn': 'FD5678Z90P', 'subslot': { '2': {", "on Slot 0 SubSlot 1\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48", "Slot 0 SubSlot 1', 'pid': 'WIC-1B-U-V2', 'sn': '10293847', 'vid': 'V01',", ", VID: V01 , SN: FOC98675W3E NAME: \"Virtual Private Network", "Mock from pyats.topology import Device from genie.metaparser.util.exceptions import SchemaEmptyParserError,\\ SchemaMissingKeyError", "\"CLK-7600 2\", DESCR: \"OSR-7600 Clock FRU 2\" PID: CLK-7600 ,", "\"other\": { \"FAN-MOD-4HS 1\": { \"name\": \"FAN-MOD-4HS 1\", \"descr\": \"High", "WS-X6748-GE-TX , VID: V04, SN: SAL14017TWF NAME: \"WS-F6700-CFC Centralized Forwarding", "NAME: \"IOSv\", DESCR: \"IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision:", "Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport", "{ \"0\": { \"WS-F6K-DFC4-A\": { \"descr\": \"WS-F6K-DFC4-A Distributed Forwarding Card", "'power on', 'returned_to_rom_at': '21:57:23 UTC Sat Aug 28 2010', 'returned_to_rom_by':", "None self.device = Mock(**self.golden_output) platform_obj = ShowProcessesCpuHistory(device=self.device) parsed_output = platform_obj.parse()", "of memory. Processor board ID FDO1633Q14S Last reset from power-on", "power supply for CISCO7604 2\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\":", "Card Rev. 
4.1\", \"name\": \"WS-F6700-CFC Centralized Forwarding Card EARL sub-module", "} golden_output_1 = {'execute.return_value': ''' CPU utilization for five seconds:", "failures = 0 Last switchover reason = unsupported Hardware Mode", "{ 'CISCO3845-MB': { 'descr': 'c3845 Motherboard with Gigabit Ethernet', 'name':", "{ \"descr\": \"X2 Transceiver 10Gbase-SR Te2/6\", \"name\": \"Transceiver Te2/6\", \"pid\":", "\"Transceiver Te2/16\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/16\" PID: X2-10GB-SR ,", "'license_type': 'Permanent', 'next_reload_license_level': 'ipservices', 'chassis': 'WS-C3750X-24S', 'main_mem': '524288', 'processor_type': 'PowerPC405',", "Mock(**self.device_output) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.parsed_output) class", "empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { \"red_sys_info\": { \"last_switchover_reason\":", "Supervisor Engine 720 Rev. 5.6\" PID: WS-SUP720-3BXL , VID: V05,", "weeks, 4 days, 2 hours, 3 minutes', 'version': '12.2(55)SE8', 'version_short':", "self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden_c3850(self): self.maxDiff = None self.dev_c3850", "= { 'version': {'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version '", ", VID: V06 , SN: ONT170201TT NAME: \"3\", DESCR: \"WS-X6824-SFP", "\"vid\": \"\", \"sn\": \"FXS170802GL\", } } }, \"CLK-7600 2\": {", "'Switch 1 - Power Supply 1', 'pid': 'PWR-C2-2929WAC', 'sn': 'LIT03728KKK',", "AIM Element', 'name': 'Virtual Private Network (VPN) Module on Slot", "368, \"five_sec_cpu\": 1.03 }, 2: { \"invoked\": 1466728, \"usecs\": 2442,", "Reload', 'license_udi': { 'device_num': { '*0': { 'pid': 'C3900-SPE150/K9', 'sn':", "genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\\ TestShowPlatformPower as test_show_platform_power_iosxe,\\ TestShowVersionRp as", "'FTX1234AMWT', 'vid': 'V07 ', }, }, }, 
'slot': { '0':", "self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv')", "NAME: \"Transceiver Te2/2\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/2\" PID: X2-10GB-SR", "X2-10GB-SR , VID: V05 , SN: AGA1515XZE2 NAME: \"Transceiver Te2/6\",", "10BaseT/100BaseTX EtherSwitch on Slot 2\", DESCR: \"16 Port 10BaseT/100BaseTX EtherSwitch\"", "prod_rel_team ROM: Bootstrap program is IOSv N95_1 uptime is 1", "5 hours, 16 minutes Time since cat6k_tb1 switched to active", "= Mock(**self.golden_output) platform_obj = ShowPlatformPower(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output)", ", VID: V02 , SN: ONT182746GZ NAME: \"1\", DESCR: \"WS-C1010XR-48FPS-I\"", "Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(", "} }, \"PS 1 PWR-2700-AC/4\": { \"other\": { \"PS 1", "24 port 1000mb SFP Rev. 1.0\" PID: WS-X6824-SFP , VID:", "- T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675W3E", "= {'execute.return_value': '''\\ Directory of flash0:/ 1 drw- 0 Jan", "= Mock(**self.golden_output_iosv) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv)", "with U.S. and local country laws. By using this product", "Clock FRU 2\" PID: CLK-7600 , VID: , SN: FXS170802GL", "PID: CLK-7600 , VID: , SN: FXS181101V4 NAME: \"CLK-7600 2\",", "\"IOSv e1000\", \"five_min_cpu\": 2.77, \"runtime\": 3582279, \"pid\": 84, \"five_sec_cpu\": 0.55", "\"process\": \"OSPF-1 Hello\", \"five_min_cpu\": 0.07, \"runtime\": 113457, \"pid\": 412, \"five_sec_cpu\":", "non-volatile configuration memory. 8192K bytes of packet buffer memory. 
65536K", "Thu 23-Nov-06 06:26 by kellythw Image text-base: 0x40101040, data-base: 0x42D98000", "05:06:40 GMT Tue Sep 10 2019 System image file is", "\"WS-C6503-E-FAN 1\", DESCR: \"Enhanced 3-slot Fan Tray 1\" PID: WS-C6503-E-FAN", "{ 'descr': 'Clear/Subrate T3/E3 WAN', 'name': 'Clear/Subrate T3/E3 WAN on", "\"sn\": \"APS17070093\", } } }, \"1\": { \"rp\": { \"VS-SUP2T-10G\":", "High Speed WAN Interface Card on Slot 0 SubSlot 3\",", "CompactFlash 3 (Read/Write) Configuration register is 0x0'''} golden_parsed_output_ios = {", "obj = ShowPlatformHardwarePlim(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(port='0/0/0') class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe):", "def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitch(device=self.dev1) with self.assertRaises(SchemaEmptyParserError):", "IOS Software, C3900 Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE SOFTWARE (fc2)", "test_show_platform_software_slot_active_monitor_Mem_iosxe,\\ TestShowPlatformHardware as test_show_platform_hardware_iosxe,\\ TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\\ TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\", "\"Failure\", \"standby_failures\": \"0\" }, \"slot\": { \"slot 0\": { \"image_ver\":", "command This product contains cryptographic features and is subject to", "}, } def test_empty(self): self.dev1 = Mock(**self.empty_output) inventory_obj = ShowInventory(device=self.dev1)", "'processor_board_flash': '10080K', 'returned_to_rom_by': 'reload', \"main_mem\": \"435457\", \"mem_size\": { \"non-volatile configuration\":", "EST Mon Dec 9 2019 System restarted at 10:27:57 EST", "{ 'EVM-HD-8FXS/DID': { 'descr': 'High Density Voice Module - 8FXS/DID',", "0 SubSlot 3\", DESCR: \"Two-Port Fast Ethernet High Speed WAN", "1\" PID: CLK-7600 , VID: , SN: FXS181101V4 
NAME: \"CLK-7600", "SN: FOC98675U0D NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk -", "= ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual( parsed_output,", "{ 'other': { 'EVM-HD-8FXS/DID': { 'descr': 'High Density Voice Module", "'C3750E-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent',", "port FXO voice interface daughtercard on Slot 1 SubSlot 1',", "{ \"Gigabit Ethernet\": \"6\" }, \"version\": \"15.6(3)M2\", \"rtr_type\": \"IOSv\", \"chassis_sn\":", "2\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", } } },", "platform_obj = ShowModule(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self):", "weeks, 5 days, 5 hours, 15 minutes System returned to", "ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0', internal=True) self.maxDiff = None self.assertEqual( parsed_output,", "self.dev = Mock(**self.golden_output_1) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output,", "'vid': 'V05 ', }, }, }, 'slot': { '0': {", "'vid': 'V01', }, }, }, '1': { 'lc': { 'NM-1T3/E3=':", "ShowPlatform(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_semi_empty(self): self.dev2 =", ", SN: FOC28476ADM NAME: \"16 Port 10BaseT/100BaseTX EtherSwitch on Slot", "= None self.assertEqual(parsed_output, self.golden_parsed_output) def test_empty(self): self.device1 = Mock(**self.empty_output) cpu_platform_obj", "VID: , SN: SAL11434N9G NAME: \"switching engine sub-module of 1\",", "\"production image\", 'processor_board_flash': '10080K', 'returned_to_rom_by': 'reload', \"main_mem\": \"435457\", \"mem_size\": {", "ShowPlatformHardwareQfpBqsStatisticsChannelAll as 
show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\ ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\\ TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\\ TestShowEnv", "'V00 ', }, 'WS-C1010XR-48FPS-I': { 'descr': 'WS-C1010XR-48FPS-I', 'name': '1', 'pid':", "number : A0 Model number : WS-C3750X-24S-E Daughterboard assembly number", "self.device = Mock(**self.golden_output) platform_obj = ShowProcessesCpuHistory(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,", "ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8) def test_golden_output_9(self): self.maxDiff =", "CMMPP00DRB Hardware Board Revision Number : 0x05 Switch Ports Model", "\"CLK-7600 1\", \"descr\": \"OSR-7600 Clock FRU 1\", \"pid\": \"CLK-7600\", \"vid\":", "{ \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/3\", \"name\": \"Transceiver", "3.1\" PID: WS-SUP720 , VID: , SN: SAL11434N9G NAME: \"switching", "V01, SN: SAL1214LAG5 NAME: \"WS-C6503-E-FAN 1\", DESCR: \"Enhanced 3-slot Fan", "Motherboard assembly number : 73-15476-04 Motherboard serial number : FDO202907UH", "obj = ShowEnvironment(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self):", "B0 Model number : WS-C3750X-24P-L Daughterboard assembly number : 800-32727-03", "\"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", } } }, \"1\":", "\"0\": { \"VS-F6K-MSFC5\": { \"descr\": \"VS-F6K-MSFC5 CPU Daughterboard Rev. 2.0\",", "with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff = None", "bytes of non-volatile configuration memory. 
8192K bytes of packet buffer", "'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'main_mem': '262144', 'mem_size': {'flash-simulated", "Wed 29-Mar-17 14:05 by prod_rel_team Configuration register = 0x0 Peer", "SFP Rev. 1.0\" PID: WS-X6824-SFP , VID: V01, SN: SAL17152EG9", "2.0\" PID: WS-X6816-10GE , VID: V02, SN: SAL17152QB3 NAME: \"WS-F6K-DFC4-E", "Cisco Systems, Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team Configuration", "\"0\": { \"WS-F6K-DFC4-E\": { \"descr\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4", ", SN: FXS181101V4 NAME: \"1\", DESCR: \"WS-SUP720-3BXL 2 ports Supervisor", "Port 10BaseT/100BaseTX EtherSwitch on Slot 2', 'pid': 'NM-16ESW', 'sn': 'FOC135464KO',", "show_platform_hardware_qfp_interface_iosxe,\\ TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\\ TestShowEnv as test_show_env_iosxe,\\ TestShowModule as test_show_module_iosxe,\\", "SAL17152QB3 NAME: \"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of", "= ShowPlatform(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_c3850) def test_golden_asr1k(self): self.maxDiff", ", VID: V04, SN: SAL14017TWF NAME: \"WS-F6700-CFC Centralized Forwarding Card", "program is C3750E boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M)", ", SN: FTX6666ARJ9 NAME: \"c3845 Motherboard with Gigabit Ethernet on", "10 hours, 27 minutes System returned to ROM by power-on", "Card EARL sub-module of 4\", \"pid\": \"WS-F6700-CFC\", \"sn\": \"SAL13516QS8\", \"vid\":", "TestShowModule as test_show_module_iosxe,\\ TestShowSwitch as test_show_switch_iosxe,\\ TestShowSwitchDetail as test_show_switch_detail_iosxe class", "'Wan Interface Card BRI U (2091, 3086) on Slot 0", "', }, }, }, 'slot': { '0': { 'other': {", "SubSlot 3\", DESCR: \"Two-Port Fast Ethernet High Speed WAN Interface", "WS-C3210X-48T-S , VID: V02 , SN: FD5678Z90P NAME: 
\"Switch 2", "'FDO1633Q14S', 'number_of_intfs': { 'Virtual Ethernet': '14', 'FastEthernet': '1', 'Gigabit Ethernet':", "= Mock(**self.empty_output) obj = ShowProcessesCpuSorted(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse()", "self.golden_parsed_output_port) def test_golden_slot(self): self.device = Mock(**self.golden_output_slot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output", "platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj", "parsed_output = platform_obj.parse( status='active', slot='0', iotype='opm') self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe):", "'Two-Port Fast Ethernet High Speed WAN Interface Card', 'name': 'Two-Port", "}, \"3\": { \"lc\": { \"WS-X6824-SFP\": { \"name\": \"3\", \"descr\":", "{'flash-simulated non-volatile configuration': '512'}, 'next_reload_license_level': 'ipservices', 'number_of_intfs': {'Gigabit Ethernet': '28',", "\"WS-C6503-E\", DESCR: \"Cisco Systems Catalyst 6500 3-slot Chassis System\" PID:", "- T1/E1 on Slot 0 SubSlot 1\", DESCR: \"VWIC2-2MFT-T1/E1 -", "DESCR: \"X2 Transceiver 10Gbase-SR Te2/2\" PID: X2-10GB-SR , VID: V06", "= None self.dev_iosv = Mock(**self.golden_output_ios_1) version_obj = ShowVersion(device=self.dev_iosv) parsed_output =", "PID: PWR-2700-AC/4 , VID: V03, SN: APS17070093 '''} golden_parsed_output_3 =", "RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2018 by", "'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 29-Mar-17 14:05', \"processor_type\": \"revision 1.0\", \"platform\":", "'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on", "<filename>src/genie/libs/parser/ios/tests/test_show_platform.py #!/bin/env python import unittest from unittest.mock import Mock from", "0.55% 0.87% 2.77% 0 
IOSv e1000 412 113457 116196 976", "V09 , SN: FOC729346GQ NAME: \"Virtual Private Network (VPN) Module", "Voice Module - 8FXS/DID on Slot 1', 'pid': 'EVM-HD-8FXS/DID', 'sn':", "program is C3750E boot loader', 'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M)", "ShowSwitch(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch_detail(test_show_switch_detail_iosxe): def test_empty(self): self.dev1", "\"rom\": \"Bootstrap program is IOSv\", \"uptime\": \"1 day, 16 hours,", "Failure Current Processor Information : ------------------------------- Active Location = slot", "Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2017 by Cisco Systems, Inc.", "{ '1': { 'C1010X-STACK': { 'descr': 'Stacking Module', 'name': 'Switch", "{ \"name\": \"WS-C6503-E\", \"descr\": \"Cisco Systems Catalyst 6500 3-slot Chassis", "Transceiver 10Gbase-SR Te2/4\", \"name\": \"Transceiver Te2/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T5\",", "FD232323XXZ NAME: \"GigabitEthernet1/0/49\", DESCR: \"1000BaseSX SFP\" PID: GLC-SX-MMD , VID:", "System serial number : FDO2028F1WK Top Assembly Part Number :", "{'execute.return_value': ''' show version Cisco Internetwork Operating System Software IOS", "ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0')", "\"s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)\", \"hostname\": \"cat6k_tb1\",", "self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1)", "Support: http://www.cisco.com/techsupport Copyright (c) 1986-2013 by Cisco Systems, Inc. 
Compiled", "class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def test_golden_active_opm(self): self.device = Mock(**self.golden_output_active_opm) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device)", "N95_1 uptime is 1 day, 16 hours, 42 minutes System", "SubSlot 0', 'pid': 'GE-DCARD-ESW', 'sn': 'FOC91864MNN', 'vid': 'V01 ', },", "DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-2 , VID: V01, SN:", "Last reload reason: Unknown reason This product contains cryptographic features", "VID: V03, SN: APS1707008Y NAME: \"PS 2 PWR-2700-AC/4\", DESCR: \"2700W", "'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R', 'vid': 'V01L ', }, }, '2/1/1': {", "3582279 1466728 2442 0.55% 0.87% 2.77% 0 IOSv e1000 412", "= Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active',", ", VID: V06 , SN: ONT1702020H NAME: \"Transceiver Te2/3\", DESCR:", "'ABC Power Supply', 'name': 'Switch 1 - Power Supply 1',", "AC power supply for CISCO7604 2\" PID: PWR-2700-AC/4 , VID:", "= Mock(**self.golden_output_iosv) platform_obj = ShowBootvar(device=self.dev_iosv) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv)", "}, }, '1': { 'other': { 'EVM-HD-8FXS/DID': { 'descr': 'High", "\"name\": \"Transceiver Te2/1\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT17020338\", \"vid\": \"V06 \",", "SN: ACW102938VS '''} golden_parsed_output_4 = { 'slot': { '1': {", "\"switching engine sub-module of 2\", \"pid\": \"WS-F6700-DFC3CXL\", \"sn\": \"SAL1214LAG5\", \"vid\":", "PWR-C2-2929WAC , VID: V02L , SN: LIT03728KKK NAME: \"Switch 1", "}, }, '1/0/49': { 'GLC-SX-MMD': { 'descr': '1000BaseSX SFP', 'name':", "'descr': 'PVDMII DSP SIMM with four DSPs', 'name': 'PVDMII DSP", "{ \"os\": \"IOS\", \"version_short\": \"12.2\", \"platform\": \"s72033_rp\", \"version\": \"12.2(18)SXF7\", 
\"image_id\":", "} } golden_output_iosv = {'execute.return_value': '''\\ Directory of flash0:/ 1", "\"X2 Transceiver 10Gbase-SR Te2/5\", \"name\": \"Transceiver Te2/5\", \"pid\": \"X2-10GB-SR\", \"sn\":", "Supply\" PID: PWR-C2-2929WAC , VID: V02L , SN: LIT03728KKK NAME:", "email to <EMAIL>. cisco WS-C6503-E (R7000) processor (revision 1.4) with", "Copyright (c) 1986-2011 by Cisco Systems, Inc. Compiled Fri 05-Aug-11", "by Cisco Systems, Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team", "9, \"one_min_cpu\": 4, \"nonzero_cpu_processes\": [ \"PIM Process\", \"IOSv e1000\", \"OSPF-1", "Ethernet High Speed WAN Interface Card', 'name': 'Two-Port Fast Ethernet", "'10:27:57 EST Mon Dec 9 2019', 'uptime': '1 hour, 20", "VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675U0D NAME: \"VWIC2-2MFT-T1/E1 -", "parsed_output = obj.parse(slot='0') class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def test_golden(self): self.device = Mock(**self.golden_output_serdes_internal)", "(fc1)', 'hostname': 'sample_switch', 'uptime': '8 weeks, 3 days, 10 hours,", "Mock(**self.golden_output_8) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8) def", ", SN: FNS153920YJ NAME: \"Transceiver Te2/16\", DESCR: \"X2 Transceiver 10Gbase-SR", "PID: PWR-C2-2929WAC , VID: V02L , SN: LIT03728KKK NAME: \"Switch", "'returned_to_rom_by': 'power-on', 'rom': 'Bootstrap program is C3750E boot loader', 'rtr_type':", "'FOC16050QP6', 'subslot': { '3': { 'HWIC-2FE': { 'descr': 'Two-Port Fast", "\"msfc sub-module of 1\", \"pid\": \"VS-F6K-MSFC5\", \"sn\": \"SAL17142D06\", \"vid\": \"\",", "self.golden_parsed_output_iosv) def test_golden_output_2(self): self.maxDiff = None self.device = Mock(**self.golden_output_2) obj", "}, }, 'main_mem': '2027520', 'mem_size': { 'non-volatile configuration': '255', },", "}, }, }, }, }, '1': { 'other': { 'EVM-HD-8FXS/DID':", "four DSPs on 
Slot 0 SubSlot 4', 'pid': 'PVDM2-64', 'sn':", "} } }, } } }, \"4\": { \"lc\": {", "minutes', 'version': '12.2(55)SE8', 'version_short': '12.2' } } golden_output_ios = {'execute.return_value':", "2\": { \"other\": { \"CLK-7600 2\": { \"name\": \"CLK-7600 2\",", "laws, return this product immediately. A summary of U.S. laws", "EST Mon Dec 9 2019', 'returned_to_rom_by': 'reload', 'rom': 'System Bootstrap,", "\"slot\": { \"CLK-7600 1\": { \"other\": { \"CLK-7600 1\": {", "port 10/100/1000mb Ethernet Rev. 2.6\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V02\", \"sn\":", "\"hw_mode\": \"Simplex\", \"communications_reason\": \"Failure\", \"standby_failures\": \"0\" }, \"slot\": { \"slot", "2019 System restarted at 10:27:57 EST Mon Dec 9 2019", "= ShowVersionRp(device=self.device) self.maxDiff = None with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(rp='standby',", "UDI: ------------------------------------------------- Device# PID SN ------------------------------------------------- *0 C3900-SPE150/K9 FOC16050QP6 Technology", "NAME: \"WS-C6503-E-FAN 1\", DESCR: \"Enhanced 3-slot Fan Tray 1\" PID:", "def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformPower(device=self.device) with self.assertRaises(SchemaEmptyParserError):", "VID: V05 , SN: FTX6666ARJ9 NAME: \"c3845 Motherboard with Gigabit", "{ '0': { 'rp': { 'C3900-SPE150/K9': { 'descr': 'Cisco Services", "Rev. 
2.0\" PID: VS-F6K-MSFC5 , VID: , SN: SAL17142D06 NAME:", "Gigabit Ethernet interfaces DRAM configuration is 72 bits wide with", "\"WS-X6824-SFP\", \"vid\": \"V01\", \"sn\": \"SAL17152EG9\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-A\":", "file is \"flash0:/vios-adventerprisek9-m\" Last reload reason: Unknown reason This product", "Te2/5\" PID: X2-10GB-SR , VID: V05 , SN: AGA1515XZE2 NAME:", "None self.dev_iosv = Mock(**self.golden_output_ios_1) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse()", "}, '1': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48", "\"Gigabit Ethernet\": \"6\" }, \"version\": \"15.6(3)M2\", \"rtr_type\": \"IOSv\", \"chassis_sn\": \"9K66Z7TOKAACDEQA24N7S\",", "memory. Processor board ID 9K66Z7TOKAACDEQA24N7S 6 Gigabit Ethernet interfaces DRAM", "\"non-volatile configuration\": \"256\" }, \"system_image\": \"flash0:/vios-adventerprisek9-m\", \"curr_config_register\": \"0x0\", \"rom\": \"Bootstrap", "SFP', 'name': 'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD', 'sn': 'ACW102938VS', 'vid': 'V01 ',", "Ethernet': '2', 'Gigabit Ethernet': '28', 'FastEthernet': '1' }, 'os': 'IOS',", "Motherboard serial number : FDO1633Q14M Model revision number : A0", "= ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet", "not available because it is in 'DISABLED' state '''} def", "'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'next_reload_license_level': 'ipservices', 'chassis':", "\"sn\": \"ONT1702020H\", \"vid\": \"V06 \", } }, \"3\": { \"X2-10GB-SR\":", "power supply for CISCO7604 2\" PID: PWR-2700-AC/4 , VID: V03,", "is IOSv '''} golden_parsed_output_iosv = { \"version\": { \"last_reload_reason\": \"Unknown", "Distributed Forwarding Card 4 EARL sub-module of 3\", \"pid\": 
\"WS-F6K-DFC4-A\",", "{ \"other\": { \"PS 2 PWR-2700-AC/4\": { \"name\": \"PS 2", "Gigabit Ethernet': '2', 'Virtual Ethernet': '2', 'Gigabit Ethernet': '28', 'FastEthernet':", "= 1 day, 16 hours, 42 minutes Image Version =", "\"WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4\", DESCR: \"WS-F6700-CFC", "loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE SOFTWARE", "VID: V06, SN: SAL13516QS8 NAME: \"FAN-MOD-4HS 1\", DESCR: \"High Speed", "15.6(3)M2, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2017", "Ethernet High Speed WAN Interface Card on Slot 0 SubSlot", "test_empty(self): self.dev = Mock(**self.empty_output) platform_obj = ShowBootvar(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsed_output", "with self.assertRaises(SchemaEmptyParserError): parsed_output = inventory_obj.parse() def test_golden_iosv(self): self.maxDiff = None", "def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError):", ", SN: FTX7908A3RQ NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk", "Processor Information : ------------------------------- Active Location = slot 0 Current", "Unknown reason This product contains cryptographic features and is subject", "WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_cat6k =", "\"last_modified_date\": \"Jan 30 2013 00:00:00 +00:00\", \"index\": \"1\", \"size\": \"0\",", "= Mock(**self.golden_output_active_opm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active',", "C3900-SPE150/K9 with 2027520K/69632K bytes of memory. 
Processor board ID FGL161010K8", "device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(status='active') def test_golden_active(self): self.maxDiff =", "10/100/1000mb Ethernet Rev. 2.6\" PID: WS-X6748-GE-TX , VID: V02, SN:", "Module\" PID: C1010X-STACK , VID: V02 , SN: FD232323XXZ NAME:", "FTX7908A3RQ NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1", "= None self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj", "inventory NAME: \"CISCO3945-CHASSIS\", DESCR: \"CISCO3945-CHASSIS\" PID: CISCO3945-CHASSIS , VID: V05", "'prod_rel_team', 'rom': 'Bootstrap program is C3750E boot loader', 'bootldr': 'C3750E", "of 3\", DESCR: \"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0\"", "Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex", "NAME: \"Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot", "self.golden_parsed_output_serdes) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdes(device=self.device1) with", "{ 'descr': '2821 chassis', 'name': '2821 chassis', 'pid': 'CISCO2821', 'sn':", "by sending email to <EMAIL>. 
Cisco IOSv (revision 1.0) with", "(C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright", "4\", \"pid\": \"WS-F6700-CFC\", \"sn\": \"SAL13516QS8\", \"vid\": \"V06\", } } },", "Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1) sample_switch uptime", "None self.dev_iosv = Mock(**self.golden_output_iosv) inventory_obj = ShowInventory(device=self.dev_iosv) parsed_output = inventory_obj.parse()", "self.device = Mock(**self.golden_output_active) obj = ShowPlatformHardware(device=self.device) parsed_output = obj.parse() self.maxDiff", "version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_ios(self): self.maxDiff = None self.dev_iosv =", "\"267\", \"size\": \"147988420\", \"permissions\": \"-rw-\" } }, \"bytes_total\": \"2142715904\", \"bytes_free\":", "\"descr\": \"X2 Transceiver 10Gbase-SR Te2/4\", \"name\": \"Transceiver Te2/4\", \"pid\": \"X2-10GB-SR\",", "NAME: \"WS-C6504-E\", DESCR: \"Cisco Systems Cisco 6500 4-slot Chassis System\"", "dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''}", "Private Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-2', 'sn':", "Transceiver 10Gbase-SR Te2/2\" PID: X2-10GB-SR , VID: V06 , SN:", "= None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output =", "products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further", "golden_parsed_output_iosv = { \"red_sys_info\": { \"last_switchover_reason\": \"unsupported\", \"maint_mode\": \"Disabled\", \"switchovers_system_experienced\":", "redundancy_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) redundancy_obj", "= None self.device = Mock(**self.golden_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) 
parsed_output", "0.07, \"runtime\": 113457, \"pid\": 412, \"five_sec_cpu\": 0.15 } }, \"five_sec_cpu_total\":", "'1000BaseSX SFP', 'name': 'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD', 'sn': 'ACW102938VS', 'vid': 'V01", "'C3900 AC Power Supply 1', 'name': 'C3900 AC Power Supply", "{ '*0': { 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6' } } },", "Ethernet interfaces 1 FastEthernet interface 28 Gigabit Ethernet interfaces 2", "Catalyst 6500 3-slot Chassis System\", \"pid\": \"WS-C6503-E\", \"vid\": \"V03\", \"sn\":", "{ 'lc': { 'NM-1T3/E3=': { 'descr': 'Clear/Subrate T3/E3 WAN', 'name':", "\"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot", "FOC65798TG8 NAME: \"Six port FXO voice interface daughtercard on Slot", "= Mock(**self.empty_output) obj = ShowPlatformHardwareSerdesInternal(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0')", "Mock(**self.golden_output_slot_internal) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0', internal=True) self.maxDiff =", "Slot 1', 'pid': 'NM-1T3/E3=', 'sn': 'FOC28476ADM', 'vid': 'V01 ', },", "SN: SAL17152QB3 NAME: \"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module", "00:00:00 +00:00\", \"index\": \"1\", \"size\": \"0\", \"permissions\": \"drw-\" }, \"vios-adventerprisek9-m\":", "Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team Configuration register =", "Mock(**self.golden_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device) parsed_output = cpu_platform_obj.parse() self.maxDiff = None", "+00:00\", \"index\": \"268\", \"size\": \"524288\", \"permissions\": \"-rw-\" }, \"boot\": {", "self.golden_parsed_output_subslot) def test_golden_slot_internal(self): self.device = Mock(**self.golden_output_slot_internal) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output", "\"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 
2.6\", \"pid\": \"WS-X6748-GE-TX\",", "hours, 16 minutes\", \"system_image\": \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\", \"chassis\": \"WS-C6503-E\", \"main_mem\": \"983008\", \"processor_type\":", "None self.dev = Mock(**self.golden_output_1) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse()", "VID: V05, SN: SAL17152N0F NAME: \"msfc sub-module of 1\", DESCR:", "ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(port='0/0/0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_port) def", "interface='gigabitEthernet 0/0/0') def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output)", "15.0(1r)M13, RELEASE SOFTWARE (fc1) best-c3945-IOS3 uptime is 1 hour, 20", "\"V06 \", } }, \"16\": { \"X2-10GB-SR\": { \"descr\": \"X2", "FDO2028F1WK Last reset from power-on 2 Virtual Ethernet interfaces 1", "system uptime = 0 minutes Switchovers system experienced = 0", "'FTX6666ARJ9', 'vid': 'V05 ', }, }, }, 'slot': { '0':", "(fc1)', 'rtr_type': 'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57 EST Mon Dec", ": 73-15476-04 Motherboard serial number : FDO202907UH Model revision number", ", VID: V03, SN: APS17070093 '''} golden_parsed_output_3 = { \"main\":", "of 1\", DESCR: \"VS-F6K-PFC4 Policy Feature Card 4 Rev. 
2.0\"", "Te2/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T5\", \"vid\": \"V06 \", } },", "System returned to ROM by power-on System restarted at 05:06:40", "\"6\" }, \"version\": \"15.6(3)M2\", \"rtr_type\": \"IOSv\", \"chassis_sn\": \"9K66Z7TOKAACDEQA24N7S\", \"chassis\": \"IOSv\",", "redundancy_obj = ShowRedundancy(device=self.dev_iosv) parsed_output = redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase):", "', }, }, }, '2': { 'rp': { 'WS-C3210X-48T-S': {", "DSPs', 'name': 'PVDMII DSP SIMM with four DSPs on Slot", "supply for CISCO7604 1\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS1707008Y\",", "RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2006 by", "Forwarding Card 4 EARL sub-module of 3\", DESCR: \"WS-F6K-DFC4-A Distributed", "{ 'CISCO3825': { 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr':", "{ \"WS-C6504-E\": { \"name\": \"WS-C6504-E\", \"descr\": \"Cisco Systems Cisco 6500", "ShowPlatformHardwareSerdes(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_serdes) def", "\"name\": \"Transceiver Te2/6\", \"pid\": \"X2-10GB-SR\", \"sn\": \"FNS153920YJ\", \"vid\": \"V06 \",", "15.0(1)M7, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2011", "parsered_output = obj.parse() def test_golden(self): self.maxDiff = None self.dev =", "at 12:22:21 PDT Mon Sep 10 2018 System image file", "Rev. 
2.6\" PID: WS-X6748-GE-TX , VID: V02, SN: SAL1128UPQ9 NAME:", "def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowProcessesCpuSorted(device=self.dev) with self.assertRaises(SchemaEmptyParserError):", "Mock(**self.golden_output_ios) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios) def", "applicable laws and regulations. If you are unable to comply", "= { 'main': { 'chassis': { 'CISCO3845': { 'descr': '3845", "= None self.assertEqual(parsed_output, self.golden_parsed_output_standby) def test_golden_standby_offline(self): self.device = Mock(**self.golden_output_standby_offline) obj", "2018 18:57:18 +00:00 e1000_bia.txt 2142715904 bytes total (1989595136 bytes free)", "Trunk - T1/E1 on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1',", "power-on 2 Virtual Ethernet interfaces 1 FastEthernet interface 28 Gigabit", "self.dev = Mock(**self.empty_output) obj = ShowEnvironment(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output =", "FRU 2\" PID: CLK-7600 , VID: , SN: FXS181101V4 NAME:", "} }, }, } golden_output_2 = {'execute.return_value': ''' NAME: \"WS-C6504-E\",", "RELEASE SOFTWARE (fc1) cat6k_tb1 uptime is 10 weeks, 5 days,", "platform_obj = ShowSwitch(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch_detail(test_show_switch_detail_iosxe): def", "'datak9', }, 'ipbase': { 'license_level': 'ipbasek9', 'license_type': 'Permanent', 'next_reload_license_level': 'ipbasek9',", "image', 'last_reload_reason': 'Reload Command', 'last_reload_type': 'Normal Reload', 'license_udi': { 'device_num':", "10Gbase-SR Te2/5\" PID: X2-10GB-SR , VID: V05 , SN: AGA1515XZE2", "4 Rev. 
2.0\" PID: VS-F6K-PFC4 , VID: V03, SN: SAL17163901", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_module(test_show_module_iosxe): def test_empty(self): self.dev1 =", "local laws, return this product immediately. A summary of U.S.", "Mock(**self.golden_output_c3850) platform_obj = ShowModule(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch(test_show_switch_iosxe):", "DESCR: \"Enhanced 3-slot Fan Tray 1\" PID: WS-C6503-E-FAN , VID:", ", SN: FDO123R12W NAME: \"Switch 1 - Power Supply 1\",", "\"s72033_rp\", \"version\": \"12.2(18)SXF7\", \"image_id\": \"s72033_rp-ADVENTERPRISEK9_WAN-M\", 'compiled_by': 'kellythw', 'compiled_date': 'Thu 23-Nov-06", "0.15 } }, \"five_sec_cpu_total\": 4, \"five_min_cpu\": 9, \"one_min_cpu\": 4, \"nonzero_cpu_processes\":", "with C3900-SPE150/K9 with 2027520K/69632K bytes of memory. Processor board ID", "with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(port='0/0/0') class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def test_golden_active_opm(self): self.device", "16 hours, 42 minutes Image Version = Cisco IOS Software,", "{ 'descr': 'High Density Voice Module - 8FXS/DID', 'name': 'High", "self.dev1 = Mock(**self.empty_output) inventory_obj = ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "on Slot 0 SubSlot 3\", DESCR: \"Two-Port Fast Ethernet High", "\"WS-F6K-DFC4-E\": { \"descr\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 
1.2\",", "= Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "Mock(**self.golden_output_7) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7) def", "Te2/16\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170201TT\", \"vid\": \"V06 \", } },", "Forwarding Card EARL sub-module of 4\", \"pid\": \"WS-F6700-CFC\", \"sn\": \"SAL13516QS8\",", "platform_obj.parse( status='active', slot='0', iotype='ipm') def test_golden_active_ipm(self): self.maxDiff = None self.device", "ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def test_empty(self):", "'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21 PDT Mon Sep 10 2018',", "0.07% 0.07% 0 OSPF-1 Hello '''} def test_empty(self): self.dev =", "V01 , SN: FOC91864MNN '''} golden_parsed_output_9 = { 'main': {", "self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_ios(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios)", "VS-F6K-PFC4 , VID: V03, SN: SAL17163901 NAME: \"Transceiver Te1/4\", DESCR:", "empty_output = {'execute.return_value': ''} golden_parsed_output = { \"five_sec_cpu_total\": 13, \"five_min_cpu\":", "FGL161010K8 NAME: \"Cisco Services Performance Engine 150 for Cisco 3900", "{'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\\ Directory of flash:/ '''}", "Dir,\\ ShowRedundancy,\\ ShowInventory,\\ ShowBootvar, \\ ShowProcessesCpuSorted,\\ ShowProcessesCpu,\\ ShowVersionRp,\\ ShowPlatform,\\ ShowPlatformPower,\\", "Number : CMMPP00DRB Hardware Board Revision Number : 0x05 Switch", "V01 , SN: FOC28476ADM 
NAME: \"16 Port 10BaseT/100BaseTX EtherSwitch on", "EARL sub-module of 4\", DESCR: \"WS-F6700-CFC Centralized Forwarding Card Rev.", "{ 'chassis': { 'CISCO2821': { 'descr': '2821 chassis', 'name': '2821", "T3/E3 WAN', 'name': 'Clear/Subrate T3/E3 WAN on Slot 1', 'pid':", "\"High Density Voice Module - 8FXS/DID on Slot 1\", DESCR:", "ready to show bootvar '''} def test_empty(self): self.dev = Mock(**self.empty_output)", "\"Two-Port Fast Ethernet High Speed WAN Interface Card on Slot", "}, \"version\": \"15.6(3)M2\", \"rtr_type\": \"IOSv\", \"chassis_sn\": \"9K66Z7TOKAACDEQA24N7S\", \"chassis\": \"IOSv\", \"image_id\":", "VID: V01 , SN: FOC98675U0D NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48", "self.device1 = Mock(**self.empty_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "golden_output_1 = {'execute.return_value': ''' CPU utilization for five seconds: 4%/0%;", "self.dev_iosv = Mock(**self.golden_output_iosv) redundancy_obj = ShowRedundancy(device=self.dev_iosv) parsed_output = redundancy_obj.parse() self.assertEqual(parsed_output,", "for compliance with U.S. and local country laws. By using", "'C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)', 'hostname':", "- T1/E1 on Slot 0 SubSlot 0\", DESCR: \"VWIC2-2MFT-T1/E1 -", "(s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)\", \"hostname\": \"cat6k_tb1\", \"uptime\": \"10", "\"WS-C6503-E-FAN 1\": { \"other\": { \"WS-C6503-E-FAN 1\": { \"name\": \"WS-C6503-E-FAN", "C3750E boot loader', 'rtr_type': 'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21 PDT", "Emulation software. 
1 Virtual Ethernet/IEEE 802.3 interface 50 Gigabit Ethernet/IEEE", "''' CPU utilization for five seconds: 4%/0%; one minute: 4%;", "\"983008\", \"processor_type\": \"R7000\", 'sp_by': 'power on', 'returned_to_rom_at': '21:57:23 UTC Sat", ", VID: V05 , SN: AGA1515XZE2 NAME: \"Transceiver Te2/6\", DESCR:", "product you agree to comply with applicable laws and regulations.", "Density Voice Module - 8FXS/DID', 'name': 'High Density Voice Module", "1 day, 16 hours, 42 minutes System returned to ROM", "interface 50 Gigabit Ethernet/IEEE 802.3 interfaces 1917K bytes of non-volatile", "import, export, distribute or use encryption. Importers, exporters, distributors and", "hours, 27 minutes', 'returned_to_rom_by': 'power-on', 'system_restarted_at': '05:06:40 GMT Tue Sep", ", VID: V05 , SN: FTX6666ARJ9 NAME: \"c3845 Motherboard with", "may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance", "1\" PID: PWR-2700-AC/4 , VID: V03, SN: APS1707008Y NAME: \"PS", "\"vid\": \"V02\", } }, \"1\": { \"X2-10GB-SR\": { \"descr\": \"X2", "Command', 'last_reload_type': 'Normal Reload', 'license_udi': { 'device_num': { '*0': {", "iotype='ipm') def test_golden_active_ipm(self): self.maxDiff = None self.device = Mock(**self.golden_output_active_ipm) platform_obj", "Revision Number : 0x05 Switch Ports Model SW Version SW", "'name': '16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2', 'pid': 'NM-16ESW',", "0 (Read/Write) License Info: License UDI: ------------------------------------------------- Device# PID SN", "Te2/6\", \"pid\": \"X2-10GB-SR\", \"sn\": \"FNS153920YJ\", \"vid\": \"V06 \", } },", "of flash-simulated non-volatile configuration memory. 
Base ethernet MAC Address :", "on Slot 0', 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6', 'subslot': { '3':", "Transceiver 10Gbase-SR Te2/6\" PID: X2-10GB-SR , VID: V06 , SN:", "Engine 150 for Cisco 3900 ISR on Slot 0\", DESCR:", "DESCR: \"High Speed Fan Module for CISCO7604 1\" PID: FAN-MOD-4HS", "}, \"2\": { \"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"2\", \"descr\":", "show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device)", "self.maxDiff = None self.device = Mock(**self.golden_output_active_opm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device)", "non-volatile configuration': '512'}, 'next_reload_license_level': 'ipservices', 'number_of_intfs': {'Gigabit Ethernet': '28', 'Ten", "'vid': '1.0', }, }, }, } golden_output_iosv = {'execute.return_value': '''\\", "test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj = ShowPlatformPower(device=self.device)", ", VID: V04, SN: SAL171848KL NAME: \"4\", DESCR: \"WS-X6748-GE-TX CEF720", "ShowProcessesCpuHistory(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def test_golden(self):", "PID: WS-X6816-10GE , VID: V02, SN: SAL17152QB3 NAME: \"WS-F6K-DFC4-E Distributed", "} golden_output_iosv = {'execute.return_value': '''\\ BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE", "= { 'main': { 'chassis': { 'CISCO3825': { 'descr': '3825", "Base ethernet MAC Address : 84:3D:C6:FF:F1:B8 Motherboard assembly number :", "self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_module(test_show_module_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj", "3 Rev. 
1.8\", \"name\": \"switching engine sub-module of 1\", \"pid\":", "{ \"WS-X6748-GE-TX\": { \"name\": \"4\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port", "Mock(**self.golden_output_active) obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='active', status='running') self.maxDiff =", "1.0\", \"pid\": \"WS-X6824-SFP\", \"vid\": \"V01\", \"sn\": \"SAL17152EG9\", \"subslot\": { \"0\":", "0 } golden_output = {'execute.return_value': '''\\ show processes cpu sorted", "\"one_min_cpu\": 0.07, \"process\": \"OSPF-1 Hello\", \"five_min_cpu\": 0.07, \"runtime\": 113457, \"pid\":", "as test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv')", "Type Next reboot ------------------------------------------------------------------ ipbase ipbasek9 Permanent ipbasek9 security securityk9", "Operating System Software IOS (tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7,", "by power cycle at 21:57:23 UTC Sat Aug 28 2010", "chassis\" PID: CISCO3845 , VID: V05 , SN: FTX6666ARJ9 NAME:", "Module - 8FXS/DID', 'name': 'High Density Voice Module - 8FXS/DID", "(fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2011 by Cisco Systems,", "\"name\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3\",", "'''} golden_parsed_output_1 = { \"sort\": { 1: { \"invoked\": 3321960,", "= {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\\ ROM: Bootstrap program", "\"DCH183500KW\", } } }, \"PS 1 PWR-1400-AC\": { \"other\": {", "VID: V01, SN: SAL17152EG9 NAME: \"WS-F6K-DFC4-A Distributed Forwarding Card 4", "Gigabit Ethernet on Slot 0', 'pid': 'CISCO3845-MB', 'sn': 'FOC729346GQ', 'vid':", "\"curr_sw_state\": \"ACTIVE\" } } } golden_output_iosv = {'execute.return_value': '''\\ Redundant", "self.assertEqual(parsed_output, self.golden_parsed_output) class 
test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj", "= obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_empty(self): self.device1", ", SN: ONT182746GZ NAME: \"1\", DESCR: \"WS-C1010XR-48FPS-I\" PID: WS-C1010XR-48FPS-I, VID:", "\"V02\", \"sn\": \"SAL17152QB3\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-E\": { \"descr\":", "AIM Element\" PID: AIM-VPN/SSL-2 , VID: V01, SN: FOC2837465E '''}", "minute: 23%; five minutes: 15% '''} golden_parsed_output_1 = { \"sort\":", "assembly number : 73-15476-04 Motherboard serial number : FDO202907UH Model", "'sn': 'FOC729346GQ', 'vid': 'V09 ', }, }, 'other': { 'AIM-VPN/SSL-3':", "Mock(**self.empty_output) platform_obj = ShowSwitch(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def", "Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1', 'pid':", "port 10/100/1000mb Ethernet Rev. 3.4\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V04\", \"sn\":", "'V01 ', }, }, }, }, }, }, }, }", "self.assertEqual(parsed_output, self.golden_parsed_output_1) class test_show_processes_cpu(test_show_processes_cpu_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) obj", "\"index\": \"264\", \"size\": \"0\", \"permissions\": \"drw-\" }, \"nvram\": { \"last_modified_date\":", "\"vid\": \"V06 \", } }, \"4\": { \"X2-10GB-SR\": { \"descr\":", "image file is \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\" This product contains cryptographic features and", "\"WS-SUP720\": { \"descr\": \"WS-SUP720 MSFC3 Daughterboard Rev. 3.1\", \"name\": \"msfc", "{ \"PS 1 PWR-2700-AC/4\": { \"name\": \"PS 1 PWR-2700-AC/4\", \"descr\":", "\"sn\": \"SAL171848KL\", \"vid\": \"V04\", } } }, } } },", "17 2018 18:57:10 +00:00\", \"index\": \"268\", \"size\": \"524288\", \"permissions\": \"-rw-\"", "to <EMAIL>. 
cisco WS-C6503-E (R7000) processor (revision 1.4) with 983008K/65536K", "TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\ ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\\ TestShowPlatformHardwareQfpStatisticsDrop", "T3/E3 WAN on Slot 1\", DESCR: \"Clear/Subrate T3/E3 WAN\" PID:", "number : WS-C3750X-24P-L Daughterboard assembly number : 800-32727-03 Daughterboard serial", "platform_obj.parse() def test_golden_c3850(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj", "parsed_output = obj.parse(key_word='CPU', sort_time='5min') self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff =", "SOFTWARE (fc1)\", \"bootldr\": \"s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE", "(fc1) BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)", "Power Supply 1\", DESCR: \"C3900 AC Power Supply 1\" PID:", "Private Network (VPN) Module on Slot 0\", DESCR: \"Encryption AIM", "'IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0', 'name': 'IOSv',", "FDO2028F1WK Top Assembly Part Number : 800-38990-01 Top Assembly Revision", "NAME: \"1\", DESCR: \"WS-C8888X-88\" PID: WS-C0123X-45T-S , VID: V00 ,", "'sn': 'FD232323XXZ', 'vid': 'V02 ', }, 'PWR-C2-2929WAC': { 'descr': 'LLL", "'1': { 'lc': { 'NM-1T3/E3=': { 'descr': 'Clear/Subrate T3/E3 WAN',", "register is 0x2102 '''} golden_output_ios_1 = {'execute.return_value': '''\\ Cisco IOS", "VID: V05 , SN: AGA1515XZE2 NAME: \"Transceiver Te2/6\", DESCR: \"X2", "''} semi_empty_output = {'execute.return_value': '''\\ ROM: Bootstrap program is IOSv", "\"Transceiver Te1/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702033D\", \"vid\": \"V06 \", }", 
"\"Wan Interface Card BRI U (2091, 3086) on Slot 0", "= obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_serdes) def test_empty(self): self.device1", "{ 'descr': 'c3845 Motherboard with Gigabit Ethernet', 'name': 'c3845 Motherboard", "ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm)", "Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)', 'hostname': 'sample_switch',", "Info: License UDI: ------------------------------------------------- Device# PID SN ------------------------------------------------- *0 C3900-SPE150/K9", "00:00:00 +00:00 boot 264 drw- 0 Oct 14 2013 00:00:00", "\"V03\", \"sn\": \"APS17070093\", } } }, \"1\": { \"rp\": {", "with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): def test_golden_serdes(self):", "{ 'descr': '3845 chassis', 'name': '3845 chassis', 'pid': 'CISCO3845', 'sn':", "NAME: \"4\", DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev.", "''' NAME: \"3825 chassis\", DESCR: \"3825 chassis\" PID: CISCO3825 ,", "V05, SN: SAL17152N0F NAME: \"msfc sub-module of 1\", DESCR: \"VS-F6K-MSFC5", "obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2) def test_golden_output_3(self):", "SubSlot 4', 'pid': 'PVDM2-64', 'sn': 'FOC63358WSI', 'vid': 'V01 ', },", "12.2(18)SXF7, RELEASE SOFTWARE (fc1) cat6k_tb1 uptime is 10 weeks, 5", "\"five_sec_cpu_total\": 13, \"five_min_cpu\": 15, \"one_min_cpu\": 23, \"five_sec_cpu_interrupts\": 0 } golden_output", "1\" PID: PWR-1400-AC , VID: V01, SN: ABC0830J127 '''} golden_output_4", "\"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/4\", 
\"name\": \"Transceiver Te2/4\",", "e1000 412 113457 116196 976 0.15% 0.07% 0.07% 0 OSPF-1", "800-38990-01 Top Assembly Revision Number : F0 Version ID :", "test_golden_output_9(self): self.maxDiff = None self.device = Mock(**self.golden_output_9) obj = ShowInventory(device=self.device)", "Slot 1\", DESCR: \"Clear/Subrate T3/E3 WAN\" PID: NM-1T3/E3= , VID:", "\"five_sec_cpu\": 1.03 }, 2: { \"invoked\": 1466728, \"usecs\": 2442, \"tty\":", "0x2102 '''} parsed_output = { 'version': { 'chassis': 'CISCO3945-CHASSIS', 'chassis_sn':", "'GLC-SX-MMD', 'sn': 'ACW102938VS', 'vid': 'V01 ', }, }, }, 'vid':", "'vid': 'V03 ', }, }, }, 'vid': 'V00 ', },", "SubSlot 1\", DESCR: \"Six port FXO voice interface daughtercard\" PID:", "\"name\": \"1\", \"descr\": \"VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE", "= Mock(**self.golden_output_1) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse(key_word='process') self.assertEqual(parsed_output, self.golden_parsed_output_1)", "= Mock(**self.golden_output_port) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(port='0/0/0') self.maxDiff =", "1.4) with 983008K/65536K bytes of memory. 
Processor board ID FXS1821Q2H9", "AC Power Supply 1\", DESCR: \"C3900 AC Power Supply 1\"", "self.golden_parsed_output_active) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardware(device=self.device1) with", ": FDO1633Q14S Top Assembly Part Number : 800-33746-04 Top Assembly", "ShowEnvironment(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_module(test_show_module_iosxe): def test_empty(self):", "self.dev1 = Mock(**self.empty_output) platform_obj = ShowPlatform(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "\"WS-C6503-E\", \"chassis_sn\": \"FXS1821Q2H9\", \"last_reload_reason\": \"s/w reset\", 'processor_board_flash': '65536K', \"number_of_intfs\": {", "800-32727-03 Daughterboard serial number : FDO172217ED System serial number :", "Mode = Simplex Maintenance Mode = Disabled Communications = Down", "116196, \"usecs\": 976, \"tty\": 0, \"one_min_cpu\": 0.07, \"process\": \"OSPF-1 Hello\",", "Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(", "version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios) def test_golden_ios_cat6k(self):", "Image text-base: 0x00003000, data-base: 0x02800000 ROM: Bootstrap program is C3750E", "L2 Cache Last reset from s/w reset SuperLAT software (copyright", "= ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = redundancy_obj.parse() def test_golden_iosv(self): self.maxDiff", "9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0', 'name': 'IOSv', 'pid': 'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S',", "3.0.0. Bridging software. TN3270 Emulation software. 
1 Virtual Ethernet/IEEE 802.3", "+00:00 boot 264 drw- 0 Oct 14 2013 00:00:00 +00:00", "'V03 ', }, }, }, 'vid': 'V04 ', }, },", "None self.dev = Mock(**self.golden_output) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse(key_word='CPU',", "\"C3900 AC Power Supply 1\" PID: PWR-3900-AC , VID: V03", "with 262144K bytes of memory. Processor board ID FDO2028F1WK Last", "\"Transceiver Te2/1\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/1\" PID: X2-10GB-SR ,", "of ATA System CompactFlash 0 (Read/Write) License Info: License UDI:", "Rev. 2.0\" PID: WS-X6816-10GE , VID: V02, SN: SAL17152QB3 NAME:", "Slot 0', 'pid': 'AIM-VPN/SSL-2', 'sn': 'FOC2837465E', 'vid': 'V01', 'subslot': {", "17 2018 18:57:18 +00:00\", \"index\": \"269\", \"size\": \"119\", \"permissions\": \"-rw-\"", "= Mock(**self.golden_output_slot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff =", "Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE SOFTWARE", "'IOS', 'platform': 'C3900', 'processor_board_flash': '2000880K', 'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47 EST", "test_semi_empty(self): self.dev2 = Mock(**self.semi_empty_output) platform_obj = ShowPlatform(device=self.dev2) with self.assertRaises(SchemaEmptyParserError): parsed_output", "'name': 'Switch 1 - Power Supply 1', 'pid': 'C3KX-PWR-350WAC', 'sn':", "''' NAME: \"1\", DESCR: \"WS-C8888X-88\" PID: WS-C0123X-45T-S , VID: V00", "} }, \"2\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR", "sub-module of 2\", DESCR: \"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev.", "test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev)", "SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F', 'vid': 'V01 ', },", "}, \"bytes_total\": \"2142715904\", \"bytes_free\": \"1989595136\" }, \"dir\": \"flash0:/\" } }", 
"self.assertEqual(parsed_output, self.golden_parsed_output_3) def test_golden_output_4(self): self.maxDiff = None self.device = Mock(**self.golden_output_4)", "10Gbase-SR Te1/4\" PID: X2-10GB-SR , VID: V06 , SN: ONT170202T1", "'main_mem': '524288', 'processor_type': 'PowerPC405', 'rtr_type': 'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S', 'number_of_intfs': {", "= version_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) version_obj = ShowVersion(device=self.dev1)", "None self.device = Mock(**self.golden_output_active_opm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output =", "FXS1821Q2H9 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600 Clock FRU 1\" PID:", "Ports Model SW Version SW Image ------ ----- ----- ----------", "'name': 'Two-Port Fast Ethernet High Speed WAN Interface Card on", "ShowProcessesCpuSorted(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff =", "non-volatile configuration': '512' }, 'curr_config_register': '0xF' } } device_output =", "self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output", "IOSv\", \"uptime\": \"1 day, 16 hours, 42 minutes\" } }", "0x40101040, data-base: 0x42D98000 ROM: System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE", "'SFP-10G-LR', 'sn': 'ONT182746GZ', 'vid': 'V02 ', }, }, }, 'vid':", "'V01 ', }, }, }, }, }, }, '1': {", "self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output", "\"IOSv\", \"image_type\": \"production image\", 'processor_board_flash': '10080K', 'returned_to_rom_by': 'reload', \"main_mem\": \"435457\",", "SN: AGA1515XZE2 NAME: \"Transceiver Te2/6\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/6\"", "ShowRedundancy,\\ ShowInventory,\\ ShowBootvar, \\ 
ShowProcessesCpuSorted,\\ ShowProcessesCpu,\\ ShowVersionRp,\\ ShowPlatform,\\ ShowPlatformPower,\\ ShowProcessesCpuHistory,\\", "FOC16062824 NAME: \"C3900 AC Power Supply 1\", DESCR: \"C3900 AC", "\"VIOS-ADVENTERPRISEK9-M\", 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 29-Mar-17 14:05', \"processor_type\": \"revision 1.0\",", "cisco WS-C6503-E (R7000) processor (revision 1.4) with 983008K/65536K bytes of", "\"1\", \"size\": \"0\", \"permissions\": \"drw-\" }, \"vios-adventerprisek9-m\": { \"last_modified_date\": \"Mar", "\"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\" } golden_output_iosv = {'execute.return_value': '''\\ BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12;", "0\", DESCR: \"c3845 Motherboard with Gigabit Ethernet\" PID: CISCO3845-MB ,", "}, }, 'C3900 AC Power Supply 1': { 'other': {", "days, 2 hours, 3 minutes', 'version': '12.2(55)SE8', 'version_short': '12.2' }", "Dec 9 2019', 'returned_to_rom_by': 'reload', 'rom': 'System Bootstrap, Version 15.0(1r)M13,", "ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def test_golden_ios_2(self): self.maxDiff =", "'securityk9', 'license_type': 'Permanent', 'next_reload_license_level': 'securityk9', }, 'uc': { 'license_level': 'None',", "'15.0', }, } def test_empty(self): self.dev1 = Mock(**self.empty_output) version_obj =", "= { \"version\": { \"os\": \"IOS\", \"version_short\": \"12.2\", \"platform\": \"s72033_rp\",", "'name': 'Virtual Private Network (VPN) Module on Slot 0', 'pid':", "', }, }, }, 'vid': 'V01 ', }, }, },", "65536K bytes of Flash internal SIMM (Sector size 512K). 
Configuration", "50 Gigabit Ethernet/IEEE 802.3 interfaces 1917K bytes of non-volatile configuration", "0 SubSlot 3', 'pid': 'HWIC-2FE', 'sn': 'FOC16062824', 'vid': 'V02 ',", "SN: 9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2 = { \"main\": { \"chassis\": {", "\"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", } } }, \"CLK-7600", "}, '1/1/1': { 'SFP-10G-SR': { 'descr': 'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1', 'pid':", "RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0\",", "self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_1)", "'main': { 'chassis': { 'IOSv': { 'descr': 'IOSv chassis, Hw", "Cisco 3900 ISR', 'name': 'Cisco Services Performance Engine 150 for", "self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "PID: X2-10GB-SR , VID: V06 , SN: ONT170202UU NAME: \"Transceiver", "hours, 42 minutes Image Version = Cisco IOS Software, IOSv", "VS-F6K-MSFC5 , VID: , SN: SAL17142D06 NAME: \"VS-F6K-PFC4 Policy Feature", "None None data datak9 Permanent datak9 Configuration register is 0x2102", "self.dev_asr1k = Mock(**self.golden_output_asr1k) platform_obj = ShowPlatform(device=self.dev_asr1k) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,", "}, 'mem_size': { 'flash-simulated non-volatile configuration': '512' }, 'curr_config_register': '0xF'", "'Gigabit(1000BaseT) module for EtherSwitch NM', 'name': 'Gigabit(1000BaseT) module for EtherSwitch", "VID: V01 , SN: ACW102938VS '''} golden_parsed_output_4 = { 'slot':", "are responsible for compliance with U.S. and local country laws.", "48 port 10/100/1000mb Ethernet Rev. 
2.6\" PID: WS-X6748-GE-TX , VID:", "'sn': 'FOC63358WSI', 'vid': 'V01 ', }, }, }, }, },", "= platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850)", "on Slot 2', 'pid': 'NM-16ESW', 'sn': 'FOC135464KO', 'subslot': { '0':", "DSP SIMM with four DSPs on Slot 0 SubSlot 4\",", "Te2/16\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/16\" PID: X2-10GB-SR , VID:", "Version 12.2(58r)SE, RELEASE SOFTWARE (fc1) sample_switch uptime is 8 weeks,", "export, transfer and use. Delivery of Cisco cryptographic products does", "\"version\": \"12.2(18)SXF7\", \"image_id\": \"s72033_rp-ADVENTERPRISEK9_WAN-M\", 'compiled_by': 'kellythw', 'compiled_date': 'Thu 23-Nov-06 06:26',", "SAL11434N9G NAME: \"switching engine sub-module of 1\", DESCR: \"WS-F6K-PFC3BXL Policy", "TestShowSwitchDetail as test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase): dev1 = Device(name='empty') dev_iosv =", "hours, 27 minutes System returned to ROM by power-on System", "V05 , SN: FTX7908A3RQ NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex", "'os': 'IOS', 'image_type': 'production image', 'compiled_date': 'Mon 22-Jan-18 04:07', 'compiled_by':", "{ 'descr': 'IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0',", "Fast Ethernet High Speed WAN Interface Card on Slot 0", "self.maxDiff = None self.device = Mock(**self.golden_output_7) obj = ShowInventory(device=self.device) parsed_output", "\"vid\": \"V06 \", } }, \"3\": { \"X2-10GB-SR\": { \"descr\":", "golden_parsed_output_8 = { 'main': { 'chassis': { 'CISCO3825': { 'descr':", "'chassis': { 'CISCO2821': { 'descr': '2821 chassis', 'name': '2821 chassis',", "{ \"invoked\": 116196, \"usecs\": 976, \"tty\": 0, \"one_min_cpu\": 0.07, \"process\":", "'C3900 AC Power Supply 1', 'pid': 'PWR-3900-AC', 'sn': 'QCS1604P0BT', 'vid':", "\"name\": \"Transceiver Te1/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702033D\", \"vid\": \"V06 \",", "APS17070093 '''} golden_parsed_output_3 = { 
\"main\": { \"chassis\": { \"WS-C6503-E\":", "reset\", 'processor_board_flash': '65536K', \"number_of_intfs\": { \"Gigabit Ethernet/IEEE 802.3\": \"50\", 'Virtual", "(fc1) cat6k_tb1 uptime is 10 weeks, 5 days, 5 hours,", "= Mock(**self.golden_output_iosv) redundancy_obj = ShowRedundancy(device=self.dev_iosv) parsed_output = redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv)", "Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team ROM: Bootstrap program", "(Read/Write) 0K bytes of ATA CompactFlash 1 (Read/Write) 0K bytes", "\"vid\": \"V03\", \"sn\": \"APS1707008Y\", } } }, \"PS 2 PWR-2700-AC/4\":", "Process 368 362874 3321960 109 1.03% 0.54% 0.48% 0 PIM", "NAME: \"Cisco Services Performance Engine 150 for Cisco 3900 ISR", "3900 ISR\" PID: C3900-SPE150/K9 , VID: V05 , SN: FOC16050QP6", "on Slot 0\", DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-3 ,", "{ 'flash-simulated non-volatile configuration': '512' }, 'curr_config_register': '0xF' } }", "self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase): dev = Device(name='ios') dev_iosv = Device(name='iosv') empty_output", "NAME: \"High Density Voice Module - 8FXS/DID on Slot 1\",", "'None', 'next_reload_license_level': 'None', }, }, 'main_mem': '2027520', 'mem_size': { 'non-volatile", "T1/E1 on Slot 0 SubSlot 0\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port", "'V01 ', }, }, }, }, } def test_empty(self): self.dev1", "FlexStackPlus Module\", DESCR: \"Stacking Module\" PID: C1010X-STACK , VID: V02", "0 Oct 14 2013 00:00:00 +00:00 config 267 -rw- 147988420", "flash-simulated non-volatile configuration memory. 
Base ethernet MAC Address : AC:F2:C5:FF:55:E7", "'license_level': 'ipbasek9', 'license_type': 'Permanent', 'next_reload_license_level': 'ipbasek9', }, 'security': { 'license_level':", "Mock(**self.golden_output_active_ipm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0',", "\"slot\": { \"slot 0\": { \"image_ver\": \"Cisco IOS Software, IOSv", "def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowProcessesCpuHistory(device=self.device) with self.assertRaises(SchemaEmptyParserError):", "'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9', 'vid': 'V01D ', }, }, '1/1/1': {", "System image file is \"flash:c3750e-universalk9-mz\" This product contains cryptographic features", "V04 , SN: FOC65798TG8 NAME: \"Six port FXO voice interface", "2 FastEthernet interfaces 3 Gigabit Ethernet interfaces 1 Virtual Private", "test_show_env(test_show_env_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowEnvironment(device=self.dev) with", "Motherboard with Gigabit Ethernet', 'name': 'c3845 Motherboard with Gigabit Ethernet", "PID: HWIC-2FE , VID: V02 , SN: FOC16062824 NAME: \"C3900", "'sn': 'FOC28476ADM', 'vid': 'V01 ', }, }, }, '16': {", "Services Performance Engine 150 for Cisco 3900 ISR', 'name': 'Cisco", "SN: FOC16050QP6 NAME: \"Two-Port Fast Ethernet High Speed WAN Interface", "= ShowPlatformHardwareSerdes(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def", "FastEthernet interface 28 Gigabit Ethernet interfaces 2 Ten Gigabit Ethernet", "\"Oct 17 2018 18:57:10 +00:00\", \"index\": \"268\", \"size\": \"524288\", \"permissions\":", "returned to ROM by power cycle at 21:57:23 UTC Sat", "non-volatile configuration memory. 
Base ethernet MAC Address : AC:F2:C5:FF:55:E7 Motherboard", "obj.parse() class test_show_version_rp(test_show_version_rp_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj =", "- Power Supply 1\", DESCR: \"BCA Power Supply\" PID: C3KX-PWR-007CBA", "assembly number : 73-13061-04 Motherboard serial number : FDO1633Q14M Model", "\"descr\": \"Cisco Systems Cisco 6500 4-slot Chassis System\", \"pid\": \"WS-C6504-E\",", "{ \"descr\": \"WS-F6700-CFC Centralized Forwarding Card Rev. 4.1\", \"name\": \"WS-F6700-CFC", "test_show_redundancy(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value':", "\"vid\": \"\", \"sn\": \"FXS181101V4\", } } }, \"1\": { \"rp\":", "\"Transceiver Te2/3\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202UU\", \"vid\": \"V06 \", }", "Directory of flash0:/ 1 drw- 0 Jan 30 2013 00:00:00", "Policy Feature Card 4 EARL sub-module of 1\", \"pid\": \"VS-F6K-PFC4\",", "AC Power Supply 1', 'name': 'C3900 AC Power Supply 1',", "3 Rev. 1.1\", \"name\": \"switching engine sub-module of 2\", \"pid\":", "{ \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/4\", \"name\": \"Transceiver", "2010', 'returned_to_rom_by': 'power cycle', \"rtr_type\": \"WS-C6503-E\", \"chassis_sn\": \"FXS1821Q2H9\", \"last_reload_reason\": \"s/w", "<EMAIL>. 
License Level: ipservices License Type: Permanent Next reload license", "Last reload reason: Reload command This product contains cryptographic features", "{ 'descr': 'WS-C3210X-48', 'name': '2', 'pid': 'WS-C3210X-48T-S', 'sn': 'FD5678Z90P', 'subslot':", "\"ONT1702033D\", \"vid\": \"V06 \", } }, }, } } },", "test_golden_output_4(self): self.maxDiff = None self.device = Mock(**self.golden_output_4) obj = ShowInventory(device=self.device)", "DESCR: \"VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS", "None self.device = Mock(**self.golden_output_7) obj = ShowInventory(device=self.device) parsed_output = obj.parse()", "'14', 'FastEthernet': '1', 'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2'", "self.assertEqual(parsed_output, self.golden_parsed_output) def test_empty(self): self.device1 = Mock(**self.empty_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1)", "42 minutes\", \"config_register\": \"0x0\", \"curr_sw_state\": \"ACTIVE\" } } } golden_output_iosv", "parsed_output = obj.parse() class test_show_platform(test_show_platform_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output)", "= ShowProcessesCpu(device=self.device) parsed_output = obj.parse(key_word='process') self.assertEqual(parsed_output, self.golden_parsed_output_1) def test_empty(self): self.device1", "1.03 }, 2: { \"invoked\": 1466728, \"usecs\": 2442, \"tty\": 0,", "is \"flash0:/vios-adventerprisek9-m\" Last reload reason: Unknown reason This product contains", "is IOSv N95_1 uptime is 1 day, 16 hours, 42", "\"sn\": \"SAL17163901\", \"vid\": \"V03\", }, }, \"4\": { \"X2-10GB-SR\": {", "= Mock(**self.golden_output_active) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) parsed_output = platform_obj.parse(status='active') self.assertEqual(parsed_output,", "Transceiver 10Gbase-SR Te2/2\", \"name\": \"Transceiver Te2/2\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702020H\",", "= ShowPlatformHardwarePlim(device=self.device) parsed_output 
= obj.parse(subslot='0/1') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_subslot)", "VID: V06 , SN: ONT170202T5 NAME: \"Transceiver Te2/5\", DESCR: \"X2", "{ \"main\": { \"chassis\": { \"WS-C6504-E\": { \"name\": \"WS-C6504-E\", \"descr\":", "\"one_min_cpu\": 4, \"nonzero_cpu_processes\": [ \"PIM Process\", \"IOSv e1000\", \"OSPF-1 Hello\"", "{ \"other\": { \"PS 1 PWR-1400-AC\": { \"name\": \"PS 1", "\"SAL171846RF\", \"vid\": \"V02\", } }, \"1\": { \"X2-10GB-SR\": { \"descr\":", "{ \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/2\", \"name\": \"Transceiver", "PID: VS-F6K-MSFC5 , VID: , SN: SAL17142D06 NAME: \"VS-F6K-PFC4 Policy", "Ethernet interfaces The password-recovery mechanism is enabled. 512K bytes of", "= { \"sort\": { 1: { \"invoked\": 3321960, \"usecs\": 109,", "Communications = Down Reason: Failure Current Processor Information : -------------------------------", "<EMAIL>. Cisco CISCO3945-CHASSIS (revision 1.1) with C3900-SPE150/K9 with 2027520K/69632K bytes", ", VID: V06 , SN: FNS153920YJ NAME: \"Transceiver Te2/16\", DESCR:", "self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowModule(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850)", "Maintenance Mode = Disabled Communications = Down Reason: Failure Current", "DESCR: \"Six port FXO voice interface daughtercard\" PID: EM-HDA-6FXO ,", "= Mock(**self.golden_output_standby) obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='standby', status='running') self.maxDiff", "', }, }, }, 'slot': { '0': { 'rp': {", "ShowRedundancy(device=self.dev_iosv) parsed_output = redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase): dev1 =", "self.dev = Mock(**self.empty_output) platform_obj = ShowBootvar(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "\"name\": 
\"WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4\", \"pid\":", "SubSlot 1', 'pid': 'WIC-1B-U-V2', 'sn': '10293847', 'vid': 'V01', }, },", "'pid': 'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S', 'vid': '1.0', }, }, }, }", "= ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1) class test_show_processes_cpu(test_show_processes_cpu_iosxe): def", "2013 00:00:00 +00:00\", \"index\": \"264\", \"size\": \"0\", \"permissions\": \"drw-\" },", "\"dir\": \"flash0:/\" } } golden_output_iosv = {'execute.return_value': '''\\ Directory of", "of 1\", DESCR: \"WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8\"", "\"WS-X6816-10GE\": { \"name\": \"2\", \"descr\": \"WS-X6816-10GE CEF720 16 port 10GE", "= obj.parse(port='0/0/0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_port) def test_golden_slot(self): self.device", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2) def test_golden_output_3(self): self.maxDiff = None", "Trunk - T1/E1 on Slot 0 SubSlot 0\", DESCR: \"VWIC2-2MFT-T1/E1", "SN: SAL13516QS8 NAME: \"FAN-MOD-4HS 1\", DESCR: \"High Speed Fan Module", "platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev = Device(name='c3850') empty_output =", "VID: V03 , SN: FOC85389QXB '''} golden_parsed_output_8 = { 'main':", "}, }, } golden_output_iosv = {'execute.return_value': '''\\ NAME: \"IOSv\", DESCR:", "1\", DESCR: \"C3900 AC Power Supply 1\" PID: PWR-3900-AC ,", "'Mon 22-Jan-18 04:07', 'compiled_by': 'prod_rel_team', 'rom': 'Bootstrap program is C3750E", "CEF720 48 port 10/100/1000mb Ethernet Rev. 
2.6\" PID: WS-X6748-GE-TX ,", "TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\\ TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\ ShowPlatformHardwareQfpInterface", "\"X2-10GB-SR\", \"sn\": \"ONT170202UU\", \"vid\": \"V06 \", } }, \"4\": {", "\"name\": \"PS 2 PWR-2700-AC/4\", \"descr\": \"2700W AC power supply for", "}, \"1\": { \"rp\": { \"VS-SUP2T-10G\": { \"name\": \"1\", \"descr\":", "'other': { 'AIM-VPN/SSL-2': { 'descr': 'Encryption AIM Element', 'name': 'Virtual", "Down Reason: Failure Current Processor Information : ------------------------------- Active Location", "self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitchDetail(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850)", "of 3\", \"pid\": \"WS-F6K-DFC4-A\", \"sn\": \"SAL171848KL\", \"vid\": \"V04\", } }", "Processor board ID FDO2028F1WK Last reset from power-on 2 Virtual", "as test_show_platform_hardware_serdes_statistics_internal_iosxe,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\ ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\\ TestShowPlatformHardwareQfpStatisticsDrop as", "V07 , SN: FTX1234AMWT NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex", "T1/E1 on Slot 0 SubSlot 1', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E',", "0 SubSlot 4\", DESCR: \"PVDMII DSP SIMM with four DSPs\"", "Mock(**self.golden_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet", "'EVM-HD-8FXS/DID': { 'descr': 'High Density Voice Module - 8FXS/DID', 'name':", "\"descr\": \"X2 
Transceiver 10Gbase-SR Te2/1\", \"name\": \"Transceiver Te2/1\", \"pid\": \"X2-10GB-SR\",", "= None self.dev_iosv = Mock(**self.golden_output_ios_cat6k) version_obj = ShowVersion(device=self.dev_iosv) parsed_output =", "'vid': 'V01 ', }, }, }, }, }, }, },", "{ \"descr\": \"WS-SUP720 MSFC3 Daughterboard Rev. 3.1\", \"name\": \"msfc sub-module", "= ShowEnvironment(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_module(test_show_module_iosxe): def", "of 1\", DESCR: \"WS-SUP720 MSFC3 Daughterboard Rev. 3.1\" PID: WS-SUP720", "password-recovery mechanism is enabled. 512K bytes of flash-simulated non-volatile configuration", "\"16\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/16\", \"name\":", "}, }, 'vid': 'V01 ', }, }, }, }, }", "parsed_output = obj.parse(key_word='process') self.assertEqual(parsed_output, self.golden_parsed_output_1) def test_empty(self): self.device1 = Mock(**self.empty_output)", "'curr_config_register': '0x2102', 'hostname': 'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason':", "parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def test_golden_active_ipm(self): self.device =", "Interface Card BRI U (2091, 3086) on Slot 0 SubSlot", "U.S. 
laws governing Cisco cryptographic products may be found at:", "Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv =", "Te2/2\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702020H\", \"vid\": \"V06 \", } },", "\"pid\": \"X2-10GB-SR\", \"sn\": \"ONT17020338\", \"vid\": \"V06 \", } }, \"2\":", "}, \"system_image\": \"flash0:/vios-adventerprisek9-m\", \"curr_config_register\": \"0x0\", \"rom\": \"Bootstrap program is IOSv\",", "ShowPlatform(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_c3850) def test_golden_asr1k(self): self.maxDiff =", "} }, \"2\": { \"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"2\",", "self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1)", "+00:00 vios-adventerprisek9-m 268 -rw- 524288 Oct 17 2018 18:57:10 +00:00", "= None self.dev_iosv = Mock(**self.golden_output_iosv) dir_obj = Dir(device=self.dev_iosv) parsed_output =", "VID: V01, SN: SAL11434LYG NAME: \"2\", DESCR: \"WS-X6748-GE-TX CEF720 48", "golden_output_9 = {'execute.return_value': ''' NAME: \"3845 chassis\", DESCR: \"3845 chassis\"", "Systems, Inc. 
Compiled Wed 26-Jun-13 09:56 by prod_rel_team Image text-base:", "\"processor_type\": \"R7000\", 'sp_by': 'power on', 'returned_to_rom_at': '21:57:23 UTC Sat Aug", "'WS-C3210X-48', 'name': '2', 'pid': 'WS-C3210X-48T-S', 'sn': 'FD5678Z90P', 'subslot': { '2':", "Next reload license Level: ipservices cisco WS-C3750X-24S (PowerPC405) processor (revision", "show processes cpu sorted 5min | inc CPU CPU utilization", "} }, \"FAN-MOD-4HS 1\": { \"other\": { \"FAN-MOD-4HS 1\": {", "\"PS 1 PWR-2700-AC/4\", DESCR: \"2700W AC power supply for CISCO7604", "}, \"config\": { \"last_modified_date\": \"Oct 14 2013 00:00:00 +00:00\", \"index\":", "\"vid\": \"V02\", \"sn\": \"SAL17152QB3\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-E\": {", "} } }, \"2\": { \"lc\": { \"WS-X6748-GE-TX\": { \"name\":", "'non-volatile configuration': '255', }, 'number_of_intfs': { 'FastEthernet': '2', 'Gigabit Ethernet':", "from s/w reset SuperLAT software (copyright 1990 by Meridian Technology", "platform_obj = ShowPlatformPower(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self):", "T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675U0D NAME:", "'PWR-C2-2929WAC', 'sn': 'LIT03728KKK', 'vid': 'V02L ', }, }, '1/0/49': {", "\"PS 2 PWR-2700-AC/4\", DESCR: \"2700W AC power supply for CISCO7604", "\"VS-SUP2T-10G\": { \"name\": \"1\", \"descr\": \"VS-SUP2T-10G 5 ports Supervisor Engine", "Transceiver 10Gbase-SR Te1/5\", \"name\": \"Transceiver Te1/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702033D\",", "cpu_platform_obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_empty(self): self.device1 =", "\"usecs\": 109, \"tty\": 0, \"one_min_cpu\": 0.54, \"process\": \"PIM Process\", \"five_min_cpu\":", "def test_golden_port(self): self.device = Mock(**self.golden_output_port) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output =", "'reload', \"main_mem\": \"435457\", \"mem_size\": 
{ \"non-volatile configuration\": \"256\" }, \"system_image\":", "'V01 ', }, }, '1': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1", "VID: V06 , SN: ONT1702033D NAME: \"2\", DESCR: \"WS-X6816-10GE CEF720", "Te2/3\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/3\" PID: X2-10GB-SR , VID:", "\"switching engine sub-module of 1\", \"pid\": \"WS-F6K-PFC3BXL\", \"sn\": \"SAL11434LYG\", \"vid\":", "PID: WS-F6700-CFC , VID: V06, SN: SAL13516QS8 NAME: \"FAN-MOD-4HS 1\",", "{ 'other': { 'C3900 AC Power Supply 1': { 'descr':", "TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\\ TestShowPlatformHardware as test_show_platform_hardware_iosxe,\\ TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\\ TestShowPlatformHardwareQfpBqsOpmMapping", "* 1 30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration register is 0xF", "config 267 -rw- 147988420 Mar 29 2017 00:00:00 +00:00 vios-adventerprisek9-m", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7) def test_golden_output_8(self): self.maxDiff = None self.device =", "PID: IOSv , VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2 =", "from unittest.mock import Mock from pyats.topology import Device from genie.metaparser.util.exceptions", "{ 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 -", "'compiled_by': 'prod_rel_team', 'compiled_date': 'Fri 05-Aug-11 00:32', 'curr_config_register': '0x2102', 'hostname': 'best-c3945-IOS3',", "test_golden_serdes(self): self.device = Mock(**self.golden_output_serdes) obj = ShowPlatformHardwareSerdes(device=self.device) parsed_output = obj.parse(slot='0')", "platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch(test_show_switch_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj", "Version 15.0(1r)M13, RELEASE SOFTWARE (fc1) best-c3945-IOS3 uptime is 1 hour,", "}, }, }, 
'vid': 'V05 ', }, }, }, 'C3900", ": CMMFF00ARC Hardware Board Revision Number : 0x04 Switch Ports", "uc None None None data datak9 Permanent datak9 Configuration register", "VID: V01L , SN: LTP13579L3R NAME: \"TenGigabitEthernet2/1/1\", DESCR: \"SFP-10GBase-LR\" PID:", "= platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_c3850) def test_golden_asr1k(self): self.maxDiff = None self.dev_asr1k", "'Reload Command', 'last_reload_type': 'Normal Reload', 'license_udi': { 'device_num': { '*0':", "\"pid\": \"VS-F6K-PFC4\", \"sn\": \"SAL17163901\", \"vid\": \"V03\", }, }, \"4\": {", "ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_slot) def", "BRI U (2091, 3086) on Slot 0 SubSlot 1', 'pid':", "{ \"descr\": \"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 1.1\", \"name\":", "SN: FXS170802GL NAME: \"1\", DESCR: \"VS-SUP2T-10G 5 ports Supervisor Engine", "23-Nov-06 06:26 by kellythw Image text-base: 0x40101040, data-base: 0x42D98000 ROM:", "Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)', 'rtr_type': 'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at':", "'rom': 'System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)', 'rtr_type': 'CISCO3945-CHASSIS',", "image\", \"rom\": \"System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)\", \"bootldr\":", "12.2(17r)S4, RELEASE SOFTWARE (fc1) BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7,", "= {'execute.return_value': ''} golden_parsed_output = { \"five_sec_cpu_total\": 13, \"five_min_cpu\": 15,", "self.golden_parsed_output_active) class test_show_env(test_show_env_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj =", "\", } }, }, } } }, \"3\": { \"lc\":", "'0': { 'rp': { 'C3900-SPE150/K9': { 'descr': 'Cisco Services Performance", "Support: http://www.cisco.com/techsupport 
Copyright (c) 1986-2017 by Cisco Systems, Inc. Compiled", "PID: AIM-VPN/SSL-2 , VID: V01, SN: FOC2837465E '''} golden_parsed_output_7 =", "V01, SN: FOC2837465E '''} golden_parsed_output_7 = { 'main': { 'chassis':", "at 600Mhz, Implementation 0x504, Rev 1.2, 512KB L2 Cache Last", "0xF '''} golden_parsed_output_ios_1 = { 'version': {'version_short': '15.2', 'platform': 'C3750E',", "2\", \"descr\": \"OSR-7600 Clock FRU 2\", \"pid\": \"CLK-7600\", \"vid\": \"\",", "362874, \"pid\": 368, \"five_sec_cpu\": 1.03 }, 2: { \"invoked\": 1466728,", "from pyats.topology import Device from genie.metaparser.util.exceptions import SchemaEmptyParserError,\\ SchemaMissingKeyError from", "{ \"VS-SUP2T-10G\": { \"name\": \"1\", \"descr\": \"VS-SUP2T-10G 5 ports Supervisor", "109 1.03% 0.54% 0.48% 0 PIM Process 84 3582279 1466728", "configuration memory. Base ethernet MAC Address : AC:F2:C5:FF:55:E7 Motherboard assembly", "Private Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-3', 'sn':", "Distributed Forwarding Card 4 Rev. 
1.2\", \"name\": \"WS-F6K-DFC4-E Distributed Forwarding", "{ 'SFP-10G-SR': { 'descr': 'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR', 'sn':", "show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\ ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\\ TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\\ TestShowEnv as test_show_env_iosxe,\\", "ONT170202T5 NAME: \"Transceiver Te2/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/5\" PID:", "= platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev = Device(name='c3850') empty_output", "found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact", "self.dev_iosv = Mock(**self.golden_output_ios_cat6k) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output,", "\"descr\": \"X2 Transceiver 10Gbase-SR Te2/16\", \"name\": \"Transceiver Te2/16\", \"pid\": \"X2-10GB-SR\",", "Rev. 1.8\" PID: WS-F6K-PFC3BXL , VID: V01, SN: SAL11434LYG NAME:", "by power-on System restarted at 12:22:21 PDT Mon Sep 10", "DESCR: \"WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 
1.0\" PID:", "}, \"PS 2 PWR-2700-AC/4\": { \"other\": { \"PS 2 PWR-2700-AC/4\":", "\"mem_size\": { \"non-volatile configuration\": \"256\" }, \"system_image\": \"flash0:/vios-adventerprisek9-m\", \"curr_config_register\": \"0x0\",", "serial number : FDO172217ED System serial number : FDO1633Q14S Top", "platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) parsed_output = platform_obj.parse(status='active') self.assertEqual(parsed_output, self.golden_parsed_output_active) class", "TestShowSwitch as test_show_switch_iosxe,\\ TestShowSwitchDetail as test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase): dev1 =", "self.golden_parsed_output_active_ipm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with", "on Slot 0 SubSlot 1', 'pid': 'WIC-1B-U-V2', 'sn': '10293847', 'vid':", "\"descr\": \"X2 Transceiver 10Gbase-SR Te2/3\", \"name\": \"Transceiver Te2/3\", \"pid\": \"X2-10GB-SR\",", "10Gbase-SR Te2/2\", \"name\": \"Transceiver Te2/2\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702020H\", \"vid\":", "= Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\\", "(fc2)\", \"uptime_in_curr_state\": \"1 day, 16 hours, 42 minutes\", \"config_register\": \"0x0\",", "\"vid\": \"V06\", } } }, } } }, }, }", "IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE SOFTWARE (fc2)", "1.1\", \"name\": \"switching engine sub-module of 2\", \"pid\": \"WS-F6700-DFC3CXL\", \"sn\":", "of 2\", \"pid\": \"WS-F6K-DFC4-E\", \"sn\": \"SAL171846RF\", \"vid\": \"V02\", } },", "= { 'slot': { '1': { 'rp': { 'WS-C0123X-45T-S': {", "SFP-10G-SR , VID: V03 , SN: SPC1519005V NAME: \"2\", DESCR:", "Implementation 0x504, Rev 1.2, 512KB L2 Cache Last reset from", "1 - FlexStackPlus Module\", DESCR: \"Stacking Module\" PID: C1010X-STACK ,", "experienced = 0 Standby failures = 0 Last switchover reason", 
"\"pid\": 84, \"five_sec_cpu\": 0.55 }, 3: { \"invoked\": 116196, \"usecs\":", "'version_short': '15.0', }, } def test_empty(self): self.dev1 = Mock(**self.empty_output) version_obj", "file is \"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\" Last reload type: Normal Reload Last reload", "VID: V01, SN: FOC758693YO NAME: \"Clear/Subrate T3/E3 WAN on Slot", "Mock(**self.golden_output) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse(key_word='CPU', sort_time='5min') self.assertEqual(parsed_output, self.golden_parsed_output)", "= None self.dev_iosv = Mock(**self.golden_output_iosv) inventory_obj = ShowInventory(device=self.dev_iosv) parsed_output =", "Number : 800-33746-04 Top Assembly Revision Number : B0 Version", "DSP SIMM with four DSPs', 'name': 'PVDMII DSP SIMM with", "\"name\": \"FAN-MOD-4HS 1\", \"descr\": \"High Speed Fan Module for CISCO7604", "Rev. 1.1\", \"name\": \"switching engine sub-module of 2\", \"pid\": \"WS-F6700-DFC3CXL\",", "\"WS-C6504-E\", DESCR: \"Cisco Systems Cisco 6500 4-slot Chassis System\" PID:", "116196 976 0.15% 0.07% 0.07% 0 OSPF-1 Hello '''} def", "interfaces 1 Virtual Private Network (VPN) Module DRAM configuration is", "iotype='opm') self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def test_empty(self): self.device = Mock(**self.empty_output)", "= ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') self.assertEqual(parsed_output,", "with applicable laws and regulations. If you are unable to", "\"WS-SUP720 MSFC3 Daughterboard Rev. 
3.1\" PID: WS-SUP720 , VID: ,", "test_show_platform(test_show_platform_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowPlatform(device=self.dev1) with", "test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowProcessesCpuSorted(device=self.dev)", "\"SAL17152EG9\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-A\": { \"descr\": \"WS-F6K-DFC4-A Distributed", "email to <EMAIL>. Cisco CISCO3945-CHASSIS (revision 1.1) with C3900-SPE150/K9 with", "= Mock(**self.empty_output) obj = ShowPlatformHardware(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse()", "}, 'WS-C1010XR-48FPS-I': { 'descr': 'WS-C1010XR-48FPS-I', 'name': '1', 'pid': 'WS-C1010XR-48FPS-I', 'sn':", "SOFTWARE (fc1)\", \"hostname\": \"cat6k_tb1\", \"uptime\": \"10 weeks, 5 days, 5", "\"9K66Z7TOKAACDEQA24N7S\", \"chassis\": \"IOSv\", \"image_id\": \"VIOS-ADVENTERPRISEK9-M\", 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 29-Mar-17", "'slot': { '0': { 'rp': { 'CISCO3825': { 'subslot': {", "ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.parsed_output) class test_dir(unittest.TestCase): dev1 =", "(VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-3', 'sn': 'FOC758693YO', 'vid':", "'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR', 'sn': 'SPC1519005V', 'vid': 'V03 ', }, },", "\"descr\": \"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 
1.1\", \"name\": \"switching", "1': { 'other': { 'C3900 AC Power Supply 1': {", "{ 'GLC-SX-MMD': { 'descr': '1000BaseSX SFP', 'name': 'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD',", "kellythw Image text-base: 0x40101040, data-base: 0x42D98000 ROM: System Bootstrap, Version", "{ 'license_level': 'datak9', 'license_type': 'Permanent', 'next_reload_license_level': 'datak9', }, 'ipbase': {", "= { \"active\": { \"boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\", \"configuration_register\": \"0x2012\" }, \"next_reload_boot_variable\":", "variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable = BOOTLDR variable = Configuration", "with self.assertRaises(SchemaEmptyParserError): parsed_output = redundancy_obj.parse() def test_golden_iosv(self): self.maxDiff = None", "DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4\" PID:", "'1': { 'C3KX-PWR-350WAC': { 'descr': 'ABC Power Supply', 'name': 'Switch", "obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0', internal=True) self.maxDiff = None", "PID: X2-10GB-SR , VID: V06 , SN: ONT17020338 NAME: \"Transceiver", "configuration memory. Base ethernet MAC Address : 84:3D:C6:FF:F1:B8 Motherboard assembly", "for Module:'c3900' ----------------------------------------------------------------- Technology Technology-package Technology-package Current Type Next reboot", "ShowPlatformHardwareQfpStatisticsDrop( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(status='active') def test_golden_active(self): self.maxDiff", "self.device = Mock(**self.empty_output) platform_obj = ShowProcessesCpuHistory(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 
1.5\"", "Cisco 3900 ISR on Slot 0', 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6',", "on Slot 0 SubSlot 1', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E', 'vid':", "\"descr\": \"X2 Transceiver 10Gbase-SR Te2/6\", \"name\": \"Transceiver Te2/6\", \"pid\": \"X2-10GB-SR\",", "'2/1/1': { 'SFP-10G-LR': { 'descr': 'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR',", "\"pid\": \"WS-F6700-CFC\", \"sn\": \"SAL13516QS8\", \"vid\": \"V06\", } } }, }", "\"VS-SUP2T-10G\", \"vid\": \"V05\", \"sn\": \"SAL17152N0F\", \"subslot\": { \"0\": { \"VS-F6K-MSFC5\":", "= None self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj", "12:22:21 PDT Mon Sep 10 2018 System image file is", "by prod_rel_team ROM: Bootstrap program is C3750E boot loader BOOTLDR:", "device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') def", "FOC2837465E '''} golden_parsed_output_7 = { 'main': { 'chassis': { 'CISCO2821':", "\"Bootstrap program is IOSv\", \"uptime\": \"1 day, 16 hours, 42", "\"ONT170201TT\", \"vid\": \"V06 \", } }, }, } } },", "of 1\", \"pid\": \"WS-SUP720\", \"sn\": \"SAL11434N9G\", \"vid\": \"\", }, \"WS-F6K-PFC3BXL\":", "ATA System CompactFlash 0 (Read/Write) License Info: License UDI: -------------------------------------------------", "drw- 0 Jan 30 2013 00:00:00 +00:00 boot 264 drw-", "}, }, 'vid': 'V05 ', }, }, }, 'C3900 AC", "}, }, }, 'vid': 'V00 ', }, 'WS-C1010XR-48FPS-I': { 'descr':", "as test_show_platform_power_iosxe,\\ TestShowVersionRp as test_show_version_rp_iosxe,\\ TestShowProcessesCpu as test_show_processes_cpu_iosxe,\\ TestShowProcessesCpuHistory as", "Trunk - T1/E1 on Slot 0 SubSlot 1\", DESCR: \"VWIC2-2MFT-T1/E1", "SN: LTP13579L3R NAME: \"TenGigabitEthernet2/1/1\", DESCR: \"SFP-10GBase-LR\" PID: SFP-10G-LR , VID:", "\"V01\", \"sn\": \"DCH170900PF\", } } }, \"PS 1 
PWR-2700-AC/4\": {", "\"V05\", \"sn\": \"SAL11434P2C\", \"subslot\": { \"0\": { \"WS-SUP720\": { \"descr\":", "(C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)', 'hostname': 'sample_switch', 'uptime': '8", "Supply 1\", DESCR: \"C3900 AC Power Supply 1\" PID: PWR-3900-AC", "Standby not ready to show bootvar '''} def test_empty(self): self.dev", "days, 2 hours, 3 minutes System returned to ROM by", "\"permissions\": \"-rw-\" }, \"config\": { \"last_modified_date\": \"Oct 14 2013 00:00:00", "'next_reload_license_level': 'None', }, }, 'main_mem': '2027520', 'mem_size': { 'non-volatile configuration':", "3-slot Chassis System\", \"pid\": \"WS-C6503-E\", \"vid\": \"V03\", \"sn\": \"FXS1821Q2H9\", }", "\"sn\": \"SAL1128UPQ9\", \"subslot\": { \"0\": { \"WS-F6700-DFC3CXL\": { \"descr\": \"WS-F6700-DFC3CXL", "= obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_slot) def test_golden_subslot(self): self.device", "\"pid\": \"FAN-MOD-4HS\", \"vid\": \"V01\", \"sn\": \"DCH170900PF\", } } }, \"PS", "'''\\ NAME: \"IOSv\", DESCR: \"IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw", "= unsupported Hardware Mode = Simplex Maintenance Mode = Disabled", "non-volatile configuration memory. 
2000880K bytes of ATA System CompactFlash 0", "self.device = Mock(**self.golden_output_2) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,", "}, 'os': 'IOS', 'platform': 'C3750E', 'processor_type': 'PowerPC405', 'returned_to_rom_by': 'power-on', 'rom':", "= version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def test_golden_ios_2(self): self.maxDiff = None self.dev_iosv", "{ 'C3900-SPE150/K9': { 'descr': 'Cisco Services Performance Engine 150 for", "= ACTIVE Uptime in current state = 1 day, 16", "Feature Card 4 EARL sub-module of 1\", DESCR: \"VS-F6K-PFC4 Policy", ": B0 Model number : WS-C3750X-24P-L Daughterboard assembly number :", "}, }, } def test_empty(self): self.dev1 = Mock(**self.empty_output) inventory_obj =", "'main': { 'chassis': { 'CISCO3845': { 'descr': '3845 chassis', 'name':", "1 - Power Supply 1', 'pid': 'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9', 'vid':", "to ROM by power-on System restarted at 12:22:21 PDT Mon", "PWR-2700-AC/4\": { \"name\": \"PS 2 PWR-2700-AC/4\", \"descr\": \"2700W AC power", "{ '0': { 'rp': { 'CISCO3825': { 'subslot': { '0':", "'Six port FXO voice interface daughtercard on Slot 1 SubSlot", "06:26', \"image_type\": \"production image\", \"rom\": \"System Bootstrap, Version 12.2(17r)S4, RELEASE", "by prod_rel_team Configuration register = 0x0 Peer (slot: 0) information", "status='active', slot='0', iotype='ipm') self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_golden_active_opm(self): self.maxDiff = None", "ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2) def test_golden_output_3(self): self.maxDiff =", "\"Two-Port Fast Ethernet High Speed WAN Interface Card\" PID: HWIC-2FE", "AIM-VPN/SSL-3 , VID: V01, SN: FOC758693YO NAME: \"Clear/Subrate T3/E3 WAN", "(Read/Write) 0K bytes of ATA CompactFlash 2 (Read/Write) 10080K bytes", "X2-10GB-SR , VID: V06 , 
SN: ONT170202UU NAME: \"Transceiver Te2/4\",", "V04, SN: SAL14017TWF NAME: \"WS-F6700-CFC Centralized Forwarding Card EARL sub-module", "'pid': 'C1010X-STACK', 'sn': 'FD232323XXZ', 'vid': 'V02 ', }, 'PWR-C2-2929WAC': {", "1\", DESCR: \"ABC Power Supply\" PID: C3KX-PWR-350WAC , VID: V01D", "self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowPlatform(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,", "PID: WS-F6K-DFC4-A , VID: V04, SN: SAL171848KL NAME: \"4\", DESCR:", "ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm)", "ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='active', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active)", "show inventory NAME: \"WS-C6503-E\", DESCR: \"Cisco Systems Catalyst 6500 3-slot", "'lc': { 'NM-16ESW': { 'descr': '16 Port 10BaseT/100BaseTX EtherSwitch', 'name':", "SN: SPC1519005V NAME: \"2\", DESCR: \"WS-C3210X-48\" PID: WS-C3210X-48T-S , VID:", "ShowVersion,\\ Dir,\\ ShowRedundancy,\\ ShowInventory,\\ ShowBootvar, \\ ShowProcessesCpuSorted,\\ ShowProcessesCpu,\\ ShowVersionRp,\\ ShowPlatform,\\", "\"five_min_cpu\": 0.48, \"runtime\": 362874, \"pid\": 368, \"five_sec_cpu\": 1.03 }, 2:", "V02L , SN: LIT03728KKK NAME: \"Switch 1 - FlexStackPlus Module\",", "\"C3900 AC Power Supply 1\", DESCR: \"C3900 AC Power Supply", "'Bootstrap program is C3750E boot loader', 'rtr_type': 'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz',", "\"CLK-7600 1\": { \"other\": { \"CLK-7600 1\": { \"name\": \"CLK-7600", "= {'execute.return_value':''' best-c3945-IOS3#show version Cisco IOS Software, C3900 Software (C3900-UNIVERSALK9-M),", "self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_golden_active_opm(self): self.maxDiff = None 
self.device = Mock(**self.golden_output_active_opm)", "ShowEnvironment(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff =", "\"SAL1214LAG5\", \"vid\": \"V01\", } } }, } } }, \"WS-C6503-E-FAN", "U.S. and local country laws. By using this product you", "redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase): dev1 = Device(name='empty') dev_iosv =", "Fast Ethernet High Speed WAN Interface Card\" PID: HWIC-2FE ,", "\"V06 \", } }, \"3\": { \"X2-10GB-SR\": { \"descr\": \"X2", "Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2006 by cisco Systems, Inc.", "\"Simplex\", \"communications_reason\": \"Failure\", \"standby_failures\": \"0\" }, \"slot\": { \"slot 0\":", "\"Six port FXO voice interface daughtercard\" PID: EM-HDA-6FXO , VID:", "daughtercard\" PID: EM-HDA-6FXO , VID: V03 , SN: FOC85389QXB '''}", "\"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3\", \"pid\":", "\"16 Port 10BaseT/100BaseTX EtherSwitch\" PID: NM-16ESW , VID: V01 ,", "python import unittest from unittest.mock import Mock from pyats.topology import", "on', 'returned_to_rom_at': '21:57:23 UTC Sat Aug 28 2010', 'returned_to_rom_by': 'power", "WAN Interface Card\" PID: HWIC-2FE , VID: V02 , SN:", "}, '2/1/1': { 'SFP-10G-LR': { 'descr': 'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1', 'pid':", "= ShowSwitch(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff", "Rev. 2.0\" PID: VS-F6K-PFC4 , VID: V03, SN: SAL17163901 NAME:", "Distributed Forwarding Card 3 Rev. 1.1\" PID: WS-F6700-DFC3CXL , VID:", "Card 4 Rev. 
1.2\" PID: WS-F6K-DFC4-E , VID: V02, SN:", "'descr': 'WS-C3210X-48', 'name': '2', 'pid': 'WS-C3210X-48T-S', 'sn': 'FD5678Z90P', 'subslot': {", "hours, 15 minutes System returned to ROM by power cycle", "1 PWR-2700-AC/4\", \"descr\": \"2700W AC power supply for CISCO7604 1\",", "bootvar '''} def test_empty(self): self.dev = Mock(**self.empty_output) platform_obj = ShowBootvar(device=self.dev)", "SN: 10293847 NAME: \"PVDMII DSP SIMM with four DSPs on", "Multiflex Trunk - T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID: V01 ,", "= ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase): dev", "with four DSPs\" PID: PVDM2-64 , VID: V01 , SN:", "self.assertEqual(parsed_output, self.golden_parsed_output_subslot) def test_golden_slot_internal(self): self.device = Mock(**self.golden_output_slot_internal) obj = ShowPlatformHardwarePlim(device=self.device)", "SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D', 'vid': 'V01 ', },", "}, } } }, \"WS-C6503-E-FAN 1\": { \"other\": { \"WS-C6503-E-FAN", "'Switch 1 - FlexStackPlus Module', 'pid': 'C1010X-STACK', 'sn': 'FD232323XXZ', 'vid':", "\"CISCO3945-CHASSIS\" PID: CISCO3945-CHASSIS , VID: V05 , SN: FGL161010K8 NAME:", "Ethernet interfaces 2 Ten Gigabit Ethernet interfaces The password-recovery mechanism", "on Slot 0', 'pid': 'CISCO3845-MB', 'sn': 'FOC729346GQ', 'vid': 'V09 ',", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def test_empty(self): self.dev", "Location = slot 0 Current Software state = ACTIVE Uptime", "def test_golden_output_7(self): self.maxDiff = None self.device = Mock(**self.golden_output_7) obj =", "\"OSR-7600 Clock FRU 2\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\",", "Interface Card\" PID: HWIC-2FE , VID: V02 , SN: FOC16062824", 
"{ \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/1\", \"name\": \"Transceiver", "(VPN) Module on Slot 0\", DESCR: \"Encryption AIM Element\" PID:", "Mock(**self.golden_output_serdes_internal) obj = ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None", "FDO172217ED System serial number : FDO1633Q14S Top Assembly Part Number", "= ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='standby', status='running') self.maxDiff = None self.assertEqual(parsed_output,", "{ 'descr': 'PVDMII DSP SIMM with four DSPs', 'name': 'PVDMII", "Revision Number : F0 Version ID : V07 CLEI Code", "parsed_output = cpu_platform_obj.parse() class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output)", "'sn': 'FOC98675W3E', 'vid': 'V01 ', }, }, }, }, },", "} }, \"PS 1 PWR-1400-AC\": { \"other\": { \"PS 1", "T1/E1 on Slot 0 SubSlot 1\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port", "\"vios-adventerprisek9-m\": { \"last_modified_date\": \"Mar 29 2017 00:00:00 +00:00\", \"index\": \"267\",", "}, \"WS-F6K-PFC3BXL\": { \"descr\": \"WS-F6K-PFC3BXL Policy Feature Card 3 Rev.", "{ 'descr': 'Six port FXO voice interface daughtercard', 'name': 'Six", "parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_c3850) def test_golden_asr1k(self): self.maxDiff = None", "A0) with 524288K bytes of memory. 
Processor board ID FDO1633Q14S", "WAN Interface Card on Slot 0 SubSlot 3\", DESCR: \"Two-Port", "platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active',", "VID: V01, SN: ABC0830J127 '''} golden_output_4 = {'execute.return_value': ''' NAME:", "'CISCO3845-MB': { 'descr': 'c3845 Motherboard with Gigabit Ethernet', 'name': 'c3845", "'Wan Interface Card BRI U (2091, 3086)', 'name': 'Wan Interface", "X2-10GB-SR , VID: V06 , SN: ONT170202T1 NAME: \"Transceiver Te1/5\",", "C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_cat6k = { \"version\":", "us by sending email to <EMAIL>. cisco WS-C6503-E (R7000) processor", "ID FDO1633Q14S Last reset from power-on 14 Virtual Ethernet interfaces", "10Gbase-SR Te1/5\", \"name\": \"Transceiver Te1/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702033D\", \"vid\":", "Fri 05-Aug-11 00:32 by prod_rel_team ROM: System Bootstrap, Version 15.0(1r)M13,", "sending email to <EMAIL>. cisco WS-C6503-E (R7000) processor (revision 1.4)", "Compiled Wed 26-Jun-13 09:56 by prod_rel_team Image text-base: 0x00003000, data-base:", "ShowPlatformHardwarePlim(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(port='0/0/0') class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def test_golden_active_opm(self):", "Power Supply\" PID: C3KX-PWR-007CBA , VID: V01L , SN: LTP13579L3R", "'sn': 'FD5678Z90P', 'subslot': { '2': { 'C3KX-PWR-007CBA': { 'descr': 'BCA", "please contact us by sending email to <EMAIL>. Cisco CISCO3945-CHASSIS", "Rev. 
1.0\", \"name\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module", "V01, SN: FXS1712Q1R8 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600 Clock FRU", "= ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output,", "PWR-2700-AC/4\", DESCR: \"2700W AC power supply for CISCO7604 1\" PID:", "Forwarding Card 4 EARL sub-module of 2\", \"pid\": \"WS-F6K-DFC4-E\", \"sn\":", "http://www.cisco.com/techsupport Copyright (c) 1986-2013 by Cisco Systems, Inc. Compiled Wed", "\"\", \"sn\": \"FXS170802GL\", } } }, \"CLK-7600 2\": { \"other\":", "600Mhz, Implementation 0x504, Rev 1.2, 512KB L2 Cache Last reset", "Engine 150 for Cisco 3900 ISR\" PID: C3900-SPE150/K9 , VID:", "1 day, 16 hours, 42 minutes Image Version = Cisco", "Rev. 1.2\", \"name\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module", "Version ' '15.2(3r)E, RELEASE SOFTWARE (fc1)', 'chassis': 'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK',", "memory. 
Processor board ID FDO2028F1WK Last reset from power-on 2", "nvram 269 -rw- 119 Oct 17 2018 18:57:18 +00:00 e1000_bia.txt", "1.1\" PID: WS-F6700-DFC3CXL , VID: V01, SN: SAL1214LAG5 NAME: \"WS-C6503-E-FAN", "Compiled Wed 29-Mar-17 14:05 by prod_rel_team Configuration register = 0x0", "\"pid\": \"X2-10GB-SR\", \"sn\": \"AGA1515XZE2\", \"vid\": \"V05 \", } }, \"6\":", "= Mock(**self.empty_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = cpu_platform_obj.parse()", "\"0\": { \"WS-F6700-CFC\": { \"descr\": \"WS-F6700-CFC Centralized Forwarding Card Rev.", "SN: DCH170900PF NAME: \"PS 1 PWR-2700-AC/4\", DESCR: \"2700W AC power", "\"last_switchover_reason\": \"unsupported\", \"maint_mode\": \"Disabled\", \"switchovers_system_experienced\": \"0\", \"available_system_uptime\": \"0 minutes\", \"communications\":", "\"1\", DESCR: \"WS-C1010XR-48FPS-I\" PID: WS-C1010XR-48FPS-I, VID: V05 , SN: FD2043B0K3", "'FTX7908A3RQ', 'vid': 'V05 ', }, }, }, 'slot': { '0':", "1986-2013 by Cisco Systems, Inc. 
Compiled Wed 26-Jun-13 09:56 by", "for CISCO7604 1\", \"pid\": \"FAN-MOD-4HS\", \"vid\": \"V01\", \"sn\": \"DCH170900PF\", }", "'C3750E-UNIVERSALK9-M', 'os': 'IOS', 'image_type': 'production image', 'compiled_date': 'Mon 22-Jan-18 04:07',", "''} golden_parsed_output_iosv = { \"red_sys_info\": { \"last_switchover_reason\": \"unsupported\", \"maint_mode\": \"Disabled\",", "1\": { \"other\": { \"WS-C6503-E-FAN 1\": { \"name\": \"WS-C6503-E-FAN 1\",", "license Level: ipservices cisco WS-C3750X-24P (PowerPC405) processor (revision W0) with", "\"production image\", \"rom\": \"System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)\",", "\"flash0:/\" } } golden_output_iosv = {'execute.return_value': '''\\ Directory of flash0:/", "test_golden_1(self): self.maxDiff = None self.dev = Mock(**self.golden_output_1) obj = ShowProcessesCpuSorted(device=self.dev)", "\"6\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/6\", \"name\":", "\"revision 1.0\", \"platform\": \"IOSv\", \"image_type\": \"production image\", 'processor_board_flash': '10080K', 'returned_to_rom_by':", "{ \"WS-F6700-CFC\": { \"descr\": \"WS-F6700-CFC Centralized Forwarding Card Rev. 
4.1\",", ", SN: FOC16050QP6 NAME: \"Two-Port Fast Ethernet High Speed WAN", "6500 4-slot Chassis System\" PID: WS-C6504-E , VID: V01, SN:", "TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\\ TestShowPlatformHardware as test_show_platform_hardware_iosxe,\\ TestShowPlatformHardwarePlim", ", VID: V00 , SN: FDO123R12W NAME: \"Switch 1 -", "Transceiver 10Gbase-SR Te2/16\", \"name\": \"Transceiver Te2/16\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170201TT\",", "cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require", "'V05 ', }, }, }, '2': { 'rp': { 'WS-C3210X-48T-S':", "SIMM with four DSPs\" PID: PVDM2-64 , VID: V01 ,", "Speed WAN Interface Card\" PID: HWIC-2FE , VID: V02 ,", "1', 'pid': 'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8', 'subslot': { '1': { 'EM-HDA-6FXO':", "\"flash0:/vios-adventerprisek9-m\", \"curr_config_register\": \"0x0\", \"rom\": \"Bootstrap program is IOSv\", \"uptime\": \"1", "', }, 'PWR-C2-2929WAC': { 'descr': 'LLL Power Supply', 'name': 'Switch", "= platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch(test_show_switch_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output)", "def test_golden_output_9(self): self.maxDiff = None self.device = Mock(**self.golden_output_9) obj =", ", VID: , SN: FXS181101V4 NAME: \"1\", DESCR: \"WS-SUP720-3BXL 2", "\"flash0:/\": { \"files\": { \"e1000_bia.txt\": { \"last_modified_date\": \"Oct 17 2018", "ShowModule(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff =", "2-Port RJ-48 Multiflex Trunk - T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID:", "minutes', 'returned_to_rom_by': 'power-on', 'system_restarted_at': '05:06:40 GMT Tue Sep 10 2019',", 
"'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3', 'subslot': { '1': { 'C1010X-STACK': { 'descr':", "= version_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv)", "DESCR: \"C3900 AC Power Supply 1\" PID: PWR-3900-AC , VID:", "\"name\": \"Transceiver Te2/16\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170201TT\", \"vid\": \"V06 \",", "3\", DESCR: \"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0\" PID:", "with four DSPs on Slot 0 SubSlot 4\", DESCR: \"PVDMII", ", SN: SAL17142D06 NAME: \"VS-F6K-PFC4 Policy Feature Card 4 EARL", "FOC28476ADM NAME: \"16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2\", DESCR:", "PID: C1010X-STACK , VID: V02 , SN: FD232323XXZ NAME: \"GigabitEthernet1/0/49\",", "Mock(**self.golden_output_1) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse(key_word='process') self.assertEqual(parsed_output, self.golden_parsed_output_1) def", "{ 'HWIC-2FE': { 'descr': 'Two-Port Fast Ethernet High Speed WAN", "\"Clear/Subrate T3/E3 WAN\" PID: NM-1T3/E3= , VID: V01 , SN:", "\"msfc sub-module of 1\", DESCR: \"WS-SUP720 MSFC3 Daughterboard Rev. 
3.1\"", "Mock(**self.golden_output_c3850) platform_obj = ShowSwitchDetail(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) if __name__", "Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2)\", \"uptime_in_curr_state\":", "Board Revision Number : 0x05 Switch Ports Model SW Version", "V03 , SN: QCS1604P0BT '''} golden_parsed_output_5 = { 'main': {", "00:00:00 +00:00 config 267 -rw- 147988420 Mar 29 2017 00:00:00", "}, \"VS-F6K-PFC4\": { \"descr\": \"VS-F6K-PFC4 Policy Feature Card 4 Rev.", "{ 'descr': '1000BaseSX SFP', 'name': 'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD', 'sn': 'ACW102938VS',", "'Permanent', 'next_reload_license_level': 'ipservices', 'chassis': 'WS-C3750X-24S', 'main_mem': '524288', 'processor_type': 'PowerPC405', 'rtr_type':", "ShowProcessesCpu(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_version_rp(test_show_version_rp_iosxe): def test_golden_active(self):", "is \"flash:c3750e-universalk9-mz\" This product contains cryptographic features and is subject", "'HWIC-2FE', 'sn': 'FOC16062824', 'vid': 'V02 ', }, }, }, 'vid':", "PID: VS-F6K-PFC4 , VID: V03, SN: SAL17163901 NAME: \"Transceiver Te1/4\",", "'compiled_date': 'Fri 05-Aug-11 00:32', 'curr_config_register': '0x2102', 'hostname': 'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M',", "= 0 Standby failures = 0 Last switchover reason =", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase): dev = Device(name='ios') dev_iosv =", "CEF720 48 port 10/100/1000mb Ethernet Rev. 
3.4\" PID: WS-X6748-GE-TX ,", "test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowEnvironment(device=self.dev)", "- 8FXS/DID on Slot 1', 'pid': 'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8', 'subslot':", "'name': '1', 'pid': 'SM-ES2-16-P', 'sn': 'FOC09876NP3', 'vid': '', }, },", "Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE", "9 2019', 'uptime': '1 hour, 20 minutes', 'version': '15.0(1)M7', 'version_short':", ", SN: SAL11434N9G NAME: \"switching engine sub-module of 1\", DESCR:", "is not available because it is in 'DISABLED' state '''}", "\"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\" This product contains cryptographic features and is subject to", "R5 uptime is 9 weeks, 4 days, 2 hours, 3", "ROM by power-on System restarted at 12:22:21 PDT Mon Sep", "PID: WS-F6K-DFC4-E , VID: V02, SN: SAL171846RF NAME: \"Transceiver Te2/1\",", "Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2013 by Cisco Systems, Inc.", "Bridging software. TN3270 Emulation software. 
1 Virtual Ethernet/IEEE 802.3 interface", "'compiled_date': 'Thu 23-Nov-06 06:26', \"image_type\": \"production image\", \"rom\": \"System Bootstrap,", "1.0\" PID: WS-F6K-DFC4-A , VID: V04, SN: SAL171848KL NAME: \"4\",", "} golden_output_8 = {'execute.return_value': ''' NAME: \"3825 chassis\", DESCR: \"3825", "ShowProcessesCpuPlatform(device=self.device) parsed_output = cpu_platform_obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def", "four DSPs\" PID: PVDM2-64 , VID: V01 , SN: FOC63358WSI", "V01, SN: FOC758693YO NAME: \"Clear/Subrate T3/E3 WAN on Slot 1\",", "\"FXS181101V4\", } } }, \"1\": { \"rp\": { \"WS-SUP720-3BXL\": {", "'pid': 'PWR-C2-2929WAC', 'sn': 'LIT03728KKK', 'vid': 'V02L ', }, }, '1/0/49':", "obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_slot) def test_golden_subslot(self): self.device =", "by Meridian Technology Corp). X.25 software, Version 3.0.0. Bridging software.", ", VID: V02 , SN: FOC16062824 NAME: \"C3900 AC Power", "'version_short': '12.2' } } golden_output_ios = {'execute.return_value': '''\\ Cisco IOS", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7) def test_golden_output_8(self): self.maxDiff = None self.device", "'vid': 'V03 ', }, }, }, 'vid': 'V04 ', },", "= Dir(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsered_output = dir_obj.parse() def test_semi_empty(self): self.dev1", "this product you agree to comply with applicable laws and", "RELEASE SOFTWARE (fc1)', 'chassis': 'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK', 'curr_config_register': '0xF', 'compiled_by':", "self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.device", "'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'Reload Command', 'last_reload_type':", "\"PVDMII DSP SIMM with four DSPs on Slot 0 
SubSlot", "16 port 10GE Rev. 2.0\" PID: WS-X6816-10GE , VID: V02,", "\"X2 Transceiver 10Gbase-SR Te2/1\" PID: X2-10GB-SR , VID: V06 ,", "test_empty(self): self.dev1 = Mock(**self.empty_output) redundancy_obj = ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "test_golden(self): self.device = Mock(**self.golden_output_serdes_internal) obj = ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output = obj.parse(slot='0')", "V06 , SN: ONT170201TT NAME: \"3\", DESCR: \"WS-X6824-SFP CEF720 24", "'ipbase': { 'license_level': 'ipbasek9', 'license_type': 'Permanent', 'next_reload_license_level': 'ipbasek9', }, 'security':", "}, } } }, \"3\": { \"lc\": { \"WS-X6824-SFP\": {", "Ethernet Rev. 2.6\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V02\", \"sn\": \"SAL1128UPQ9\", \"subslot\":", "ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def", "= Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "\"3825 chassis\" PID: CISCO3825 , VID: V05 , SN: FTX7908A3RQ", "\"3845 chassis\", DESCR: \"3845 chassis\" PID: CISCO3845 , VID: V05", "securityk9 uc None None None data datak9 Permanent datak9 Configuration", "'Thu 23-Nov-06 06:26', \"image_type\": \"production image\", \"rom\": \"System Bootstrap, Version", "'vid': 'V01 ', }, }, }, }, } def test_empty(self):", "30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_1", "Assembly Revision Number : B0 Version ID : V03 CLEI", "obj.parse() class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def test_golden_port(self): self.device = Mock(**self.golden_output_port) obj =", "= 
obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) def test_empty(self):", "authority to import, export, distribute or use encryption. Importers, exporters,", "self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_cat6k) version_obj = ShowVersion(device=self.dev_iosv) parsed_output", "WS-C6503-E , VID: V03, SN: FXS1821Q2H9 NAME: \"CLK-7600 1\", DESCR:", "'device_num': { '*0': { 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6' } }", "'chassis_sn': 'FDO2028F1WK', 'curr_config_register': '0xF', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 26-Jun-13 09:56',", "\"vid\": \"V03\", \"sn\": \"APS17070093\", } } }, \"1\": { \"rp\":", "\"0x0\", \"curr_sw_state\": \"ACTIVE\" } } } golden_output_iosv = {'execute.return_value': '''\\", "4 EARL sub-module of 3\", DESCR: \"WS-F6K-DFC4-A Distributed Forwarding Card", "983008K/65536K bytes of memory. Processor board ID FXS1821Q2H9 SR71000 CPU", ", SN: FTX1234AMWT NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk", "''} semi_empty_output = {'execute.return_value': '''\\ Directory of flash:/ '''} golden_parsed_output_iosv", "150 for Cisco 3900 ISR on Slot 0', 'pid': 'C3900-SPE150/K9',", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) def test_empty(self): self.device1 = Mock(**self.empty_output)", "DESCR: \"Clear/Subrate T3/E3 WAN\" PID: NM-1T3/E3= , VID: V01 ,", "{'execute.return_value': ''} golden_parsed_output_iosv = { 'main': { 'chassis': { 'IOSv':", "'subslot': { '1': { 'C3KX-PWR-350WAC': { 'descr': 'ABC Power Supply',", "Transceiver 10Gbase-SR Te1/5\" PID: X2-10GB-SR , VID: V06 , SN:", "parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) def", "2018 18:57:10 +00:00\", \"index\": \"268\", \"size\": \"524288\", \"permissions\": \"-rw-\" },", "1\", \"pid\": \"VS-F6K-MSFC5\", \"sn\": \"SAL17142D06\", 
\"vid\": \"\", }, \"VS-F6K-PFC4\": {", "self.device = Mock(**self.golden_output_port) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(port='0/0/0') self.maxDiff", "\"index\": \"1\", \"size\": \"0\", \"permissions\": \"drw-\" }, \"vios-adventerprisek9-m\": { \"last_modified_date\":", "None self.dev = Mock(**self.golden_output) obj = ShowEnvironment(device=self.dev) parsed_output = obj.parse()", "Slot 0\", DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-2 , VID:", "software. 1 Virtual Ethernet/IEEE 802.3 interface 50 Gigabit Ethernet/IEEE 802.3", ", VID: V01, SN: FXS1712Q1R8 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600", "{ \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/5\", \"name\": \"Transceiver", "DESCR: \"WS-C3210X-48\" PID: WS-C3210X-48T-S , VID: V02 , SN: FD5678Z90P", "= ShowRedundancy(device=self.dev_iosv) parsed_output = redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase): dev1", "1\", DESCR: \"LLL Power Supply\" PID: PWR-C2-2929WAC , VID: V02L", "Daughterboard assembly number : 800-32727-03 Daughterboard serial number : FDO172217ED", "= Simplex Maintenance Mode = Disabled Communications = Down Reason:", "parsed_output = cpu_platform_obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_empty(self):", "ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5) def test_golden_output_6(self): self.maxDiff =", "ShowPlatformHardwareQfpBqsIpmMapping,\\ ShowPlatformHardwareSerdes,\\ ShowPlatformHardwareSerdesInternal,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\ ShowPlatformHardwareQfpStatisticsDrop,\\ ShowEnvironment,\\ ShowModule,\\ ShowSwitch, ShowSwitchDetail", "\"1\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/1\", \"name\":", "42 minutes Image Version = 
Cisco IOS Software, IOSv Software", "15.2(3r)E, RELEASE SOFTWARE (fc1) R5 uptime is 9 weeks, 4", "WS-C3750X-24P-L Daughterboard assembly number : 800-32727-03 Daughterboard serial number :", "obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware(test_show_platform_hardware_iosxe):", "4 Rev. 1.0\", \"name\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL", "summary of U.S. laws governing Cisco cryptographic products may be", "program is IOSv '''} golden_parsed_output_iosv = { \"version\": { \"last_reload_reason\":", "WS-F6700-DFC3CXL , VID: V01, SN: SAL1214LAG5 NAME: \"WS-C6503-E-FAN 1\", DESCR:", "golden_output_ios_1 = {'execute.return_value': '''\\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M),", "e1000_bia.txt 2142715904 bytes total (1989595136 bytes free) '''} def test_empty(self):", "\"V06 \", } }, \"5\": { \"X2-10GB-SR\": { \"descr\": \"X2", "by reload at 10:26:47 EST Mon Dec 9 2019 System", "mechanism is enabled. 
512K bytes of flash-simulated non-volatile configuration memory.", "\"ONT170202T1\", \"vid\": \"V06 \", } }, \"5\": { \"X2-10GB-SR\": {", "self.device = Mock(**self.golden_output_standby) obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='standby', status='running')", "Mock(**self.golden_output) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse() self.maxDiff = None", "active is 10 weeks, 5 days, 5 hours, 15 minutes", "'GE-DCARD-ESW': { 'descr': 'Gigabit(1000BaseT) module for EtherSwitch NM', 'name': 'Gigabit(1000BaseT)", "self.assertEqual(parsed_output, self.golden_parsed_output_7) def test_golden_output_8(self): self.maxDiff = None self.device = Mock(**self.golden_output_8)", "'28', 'FastEthernet': '1' }, 'os': 'IOS', 'platform': 'C3750E', 'processor_type': 'PowerPC405',", "12.2(58r)SE, RELEASE SOFTWARE (fc1) sample_switch uptime is 8 weeks, 3", "'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS', 'sn': 'FGL161010K8', 'vid': 'V05 ', }, },", "register is 0xF '''} golden_parsed_output_ios_cat6k = { \"version\": { \"os\":", "\"X2-10GB-SR\", \"sn\": \"AGA1515XZE2\", \"vid\": \"V05 \", } }, \"6\": {", "Card\" PID: HWIC-2FE , VID: V02 , SN: FOC16062824 NAME:", "= obj.parse(rp='standby', status='running') def test_empty(self): self.device1 = Mock(**self.empty_output) obj =", "\"pid\": \"WS-C6503-E-FAN\", \"vid\": \"V02\", \"sn\": \"DCH183500KW\", } } }, \"PS", "\"mem_size\": {\"non-volatile configuration\": \"1917\", \"packet buffer\": \"8192\"}, \"curr_config_register\": \"0x2102\", }", "Fan Module for CISCO7604 1\" PID: FAN-MOD-4HS , VID: V01,", "\"bytes_free\": \"1989595136\" }, \"dir\": \"flash0:/\" } } golden_output_iosv = {'execute.return_value':", "'NM-16ESW', 'sn': 'FOC135464KO', 'subslot': { '0': { 'GE-DCARD-ESW': { 'descr':", "29-Mar-17 14:05 by prod_rel_team Configuration register = 0x0 Peer (slot:", "'C3750E', 'version': '15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M', 'os': 'IOS', 'image_type': 'production image',", "20 
minutes System returned to ROM by reload at 10:26:47", "Hw Revision: 1.0', 'name': 'IOSv', 'pid': 'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S', 'vid':", "{ \"name\": \"2\", \"descr\": \"WS-X6816-10GE CEF720 16 port 10GE Rev.", "of Cisco cryptographic products does not imply third-party authority to", "\"available_system_uptime\": \"0 minutes\", \"communications\": \"Down\", \"hw_mode\": \"Simplex\", \"communications_reason\": \"Failure\", \"standby_failures\":", "System\" PID: WS-C6503-E , VID: V03, SN: FXS1821Q2H9 NAME: \"CLK-7600", "\"rtr_type\": \"WS-C6503-E\", \"chassis_sn\": \"FXS1821Q2H9\", \"last_reload_reason\": \"s/w reset\", 'processor_board_flash': '65536K', \"number_of_intfs\":", "Forwarding Card EARL sub-module of 4\", DESCR: \"WS-F6700-CFC Centralized Forwarding", "def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj =", "CEF720 24 port 1000mb SFP Rev. 1.0\" PID: WS-X6824-SFP ,", "T1/E1\" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675W3E NAME:", "CISCO3845 , VID: V05 , SN: FTX6666ARJ9 NAME: \"c3845 Motherboard", "ShowVersion(device=self.dev1) with self.assertRaises(KeyError): parsed_output = version_obj.parse() def test_golden_iosv(self): self.maxDiff =", "1466728 2442 0.55% 0.87% 2.77% 0 IOSv e1000 412 113457", "\"sn\": \"SAL17142D06\", \"vid\": \"\", }, \"VS-F6K-PFC4\": { \"descr\": \"VS-F6K-PFC4 Policy", "= Mock(**self.golden_output_ios) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios)", "B0 Version ID : V03 CLEI Code Number : CMMFF00ARC", "10/100/1000mb Ethernet Rev. 
3.4\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V04\", \"sn\": \"SAL14017TWF\",", "{ 'version': {'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version ' '15.2(3r)E,", "sort_time='5min') self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.dev =", "\"descr\": \"X2 Transceiver 10Gbase-SR Te2/2\", \"name\": \"Transceiver Te2/2\", \"pid\": \"X2-10GB-SR\",", "'platform': 'C3750E', 'processor_type': 'PowerPC405', 'returned_to_rom_by': 'power-on', 'rom': 'Bootstrap program is", "1466728, \"usecs\": 2442, \"tty\": 0, \"one_min_cpu\": 0.87, \"process\": \"IOSv e1000\",", "{ \"descr\": \"X2 Transceiver 10Gbase-SR Te2/16\", \"name\": \"Transceiver Te2/16\", \"pid\":", "laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html", "import Device from genie.metaparser.util.exceptions import SchemaEmptyParserError,\\ SchemaMissingKeyError from genie.libs.parser.ios.show_platform import", "PID: X2-10GB-SR , VID: V06 , SN: ONT1702033D NAME: \"2\",", "Forwarding Card 4 Rev. 
1.2\", \"name\": \"WS-F6K-DFC4-E Distributed Forwarding Card", "= {'execute.return_value': '''\\ Directory of flash:/ '''} golden_parsed_output_iosv = {", "platform_obj.parse() def test_semi_empty(self): self.dev2 = Mock(**self.semi_empty_output) platform_obj = ShowPlatform(device=self.dev2) with", "Systems Cisco 6500 4-slot Chassis System\", \"pid\": \"WS-C6504-E\", \"vid\": \"V01\",", "{ 'descr': 'Wan Interface Card BRI U (2091, 3086)', 'name':", "class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll(", "\"OSR-7600 Clock FRU 2\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\",", "self.dev_iosv = Mock(**self.golden_output_iosv) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output,", "= ShowPlatformHardwareSerdesInternal(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def", "test_show_platform_hardware(test_show_platform_hardware_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowPlatformHardware(device=self.device) parsed_output", "golden_parsed_output_4 = { 'slot': { '1': { 'rp': { 'WS-C0123X-45T-S':", "= ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5) def test_golden_output_6(self): self.maxDiff", "Mock(**self.golden_output_slot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None", "'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2' }, 'mem_size': {", ", VID: , SN: SAL17142D06 NAME: \"VS-F6K-PFC4 Policy Feature Card", "= {'execute.return_value': 
''' NAME: \"1\", DESCR: \"WS-C8888X-88\" PID: WS-C0123X-45T-S ,", "cisco Systems, Inc. Compiled Thu 23-Nov-06 06:26 by kellythw Image", "self.golden_parsed_output_active_ipm) def test_golden_active_opm(self): self.maxDiff = None self.device = Mock(**self.golden_output_active_opm) platform_obj", "{ \"CLK-7600 1\": { \"name\": \"CLK-7600 1\", \"descr\": \"OSR-7600 Clock", "30 2013 00:00:00 +00:00 boot 264 drw- 0 Oct 14", "self.device = Mock(**self.golden_output_serdes_internal) obj = ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff", "\"FXS181101V4\", } } }, \"CLK-7600 2\": { \"other\": { \"CLK-7600", "self.device = Mock(**self.golden_output_1) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse(key_word='process') self.assertEqual(parsed_output,", "DESCR: \"X2 Transceiver 10Gbase-SR Te1/5\" PID: X2-10GB-SR , VID: V06", "configuration is 72 bits wide with parity disabled. 256K bytes", "further assistance please contact us by sending email to <EMAIL>.", "Ethernet/IEEE 802.3 interfaces 1917K bytes of non-volatile configuration memory. 8192K", "Inc. Compiled Fri 05-Aug-11 00:32 by prod_rel_team ROM: System Bootstrap,", "Card 3 Rev. 
1.8\", \"name\": \"switching engine sub-module of 1\",", "= ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='active', status='running') self.maxDiff = None self.assertEqual(parsed_output,", "self.dev1 = Mock(**self.empty_output) redundancy_obj = ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "ShowPlatformHardwareSerdes(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def test_golden(self):", "3900 ISR on Slot 0', 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6', 'subslot':", "FXO voice interface daughtercard on Slot 1 SubSlot 1\", DESCR:", "FOC758693YO NAME: \"Clear/Subrate T3/E3 WAN on Slot 1\", DESCR: \"Clear/Subrate", "= platform_obj.parse( status='active', slot='0', iotype='opm') self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def", "DESCR: \"LLL Power Supply\" PID: PWR-C2-2929WAC , VID: V02L ,", "TestShowEnv as test_show_env_iosxe,\\ TestShowModule as test_show_module_iosxe,\\ TestShowSwitch as test_show_switch_iosxe,\\ TestShowSwitchDetail", "\"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 
1.0\", \"name\": \"WS-F6K-DFC4-A Distributed", "Bootstrap program is IOSv N95_1 uptime is 1 day, 16", "'ACW102938VS', 'vid': 'V01 ', }, }, }, 'vid': 'V05 ',", "\"X2 Transceiver 10Gbase-SR Te1/4\" PID: X2-10GB-SR , VID: V06 ,", "2 PWR-2700-AC/4\", DESCR: \"2700W AC power supply for CISCO7604 2\"", "= Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output", "Te2/4\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/4\" PID: X2-10GB-SR , VID:", "}, }, 'slot': { '0': { 'lc': { 'CISCO3845-MB': {", "Internetwork Operating System Software IOS (tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version", "EtherSwitch', 'name': '16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2', 'pid':", "configuration': '255', }, 'number_of_intfs': { 'FastEthernet': '2', 'Gigabit Ethernet': '3',", "4 EARL sub-module of 2\", \"pid\": \"WS-F6K-DFC4-E\", \"sn\": \"SAL171846RF\", \"vid\":", "'license_level': 'securityk9', 'license_type': 'Permanent', 'next_reload_license_level': 'securityk9', }, 'uc': { 'license_level':", "minutes\" } } golden_output_iosv = {'execute.return_value': '''\\ Cisco IOS Software,", "{ 'non-volatile configuration': '255', }, 'number_of_intfs': { 'FastEthernet': '2', 'Gigabit", "parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') def test_golden_active_ipm(self): self.maxDiff =", "text-base: 0x00003000, data-base: 0x02800000 ROM: Bootstrap program is C3750E boot", "}, 'vid': 'V05 ', }, }, }, 'C3900 AC Power", "Te2/1\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT17020338\", \"vid\": \"V06 \", } },", "\"V02\", } }, \"1\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver", "image file is \"flash:c3750e-universalk9-mz\" This product contains cryptographic features and", "Rev. 
2.0\", \"pid\": \"WS-X6816-10GE\", \"vid\": \"V02\", \"sn\": \"SAL17152QB3\", \"subslot\": {", "}, 'vid': 'V00 ', }, 'WS-C1010XR-48FPS-I': { 'descr': 'WS-C1010XR-48FPS-I', 'name':", "version Cisco Internetwork Operating System Software IOS (tm) s72033_rp Software", "10 2019 System image file is \"flash:c3750e-universalk9-mz.152-2.E8.bin\" Last reload reason:", "test_golden_slot(self): self.device = Mock(**self.golden_output_slot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0')", "'FDO123R12W', 'subslot': { '1': { 'C3KX-PWR-350WAC': { 'descr': 'ABC Power", "3.4\" PID: WS-X6748-GE-TX , VID: V04, SN: SAL14017TWF NAME: \"WS-F6700-CFC", "{ '0': { 'other': { 'AIM-VPN/SSL-2': { 'descr': 'Encryption AIM", "}, }, 'vid': 'V00 ', }, 'WS-C1010XR-48FPS-I': { 'descr': 'WS-C1010XR-48FPS-I',", "file is \"flash:c3750e-universalk9-mz.152-2.E8.bin\" Last reload reason: Reload command This product", "as test_show_platform_hardware_plim_iosxe,\\ TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\ TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\ TestShowPlatformHardwareSerdesStatistics as", "def test_golden_ios_2(self): self.maxDiff = None self.dev_iosv = Mock(**self.device_output) version_obj =", "slot='0') class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def test_golden_active_ipm(self): self.device = Mock(**self.golden_output_active_ipm) obj =", "'descr': '1000BaseSX SFP', 'name': 'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD', 'sn': 'ACW102938VS', 'vid':", "NAME: \"1\", DESCR: \"WS-C1010XR-48FPS-I\" PID: WS-C1010XR-48FPS-I, VID: V05 , SN:", "'512' }, 'curr_config_register': '0xF' } } device_output = {'execute.return_value':''' best-c3945-IOS3#show", "\"chassis\": \"IOSv\", \"image_id\": \"VIOS-ADVENTERPRISEK9-M\", 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 29-Mar-17 14:05',", 
"'None', 'license_type': 'None', 'next_reload_license_level': 'None', }, }, 'main_mem': '2027520', 'mem_size':", "'1' }, \"mem_size\": {\"non-volatile configuration\": \"1917\", \"packet buffer\": \"8192\"}, \"curr_config_register\":", "weeks, 5 days, 5 hours, 16 minutes Time since cat6k_tb1", "} golden_output = {'execute.return_value': '''\\ show processes cpu sorted 5min", "dev = Device(name='c3850') empty_output = {'execute.return_value': ''} golden_parsed_output = {", "= Mock(**self.golden_output_iosv) dir_obj = Dir(device=self.dev_iosv) parsed_output = dir_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv)", "NAME: \"TenGigabitEthernet1/1/1\", DESCR: \"SFP-10GBase-SR\" PID: SFP-10G-SR , VID: V03 ,", "'''} def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowProcessesCpuSorted(device=self.dev) with", "10080K bytes of ATA CompactFlash 3 (Read/Write) Configuration register is", "Supply', 'name': 'Switch 2 - Power Supply 1', 'pid': 'C3KX-PWR-007CBA',", "\"8192\"}, \"curr_config_register\": \"0x2102\", } } golden_output_ios_cat6k = {'execute.return_value': ''' show", "golden_parsed_output = { \"five_sec_cpu_total\": 13, \"five_min_cpu\": 15, \"one_min_cpu\": 23, \"five_sec_cpu_interrupts\":", "platform_obj = ShowPlatform(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_semi_empty(self):", "0\", DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-3 , VID: V01,", "because it is in 'DISABLED' state '''} def test_empty(self): self.dev1", "---------- ---------- * 1 30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M Configuration register", "2\", \"pid\": \"WS-F6700-DFC3CXL\", \"sn\": \"SAL1214LAG5\", \"vid\": \"V01\", } } },", "\"pid\": \"WS-SUP720\", \"sn\": \"SAL11434N9G\", \"vid\": \"\", }, \"WS-F6K-PFC3BXL\": { \"descr\":", "Te2/16\", \"name\": \"Transceiver Te2/16\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170201TT\", \"vid\": \"V06", "512KB L2 Cache Last reset from 
s/w reset SuperLAT software", "2\", \"pid\": \"WS-F6K-DFC4-E\", \"sn\": \"SAL171846RF\", \"vid\": \"V02\", } }, \"1\":", "ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7) def test_golden_output_8(self): self.maxDiff =", "\"AC power supply, 1400 watt 1\" PID: PWR-1400-AC , VID:", "#!/bin/env python import unittest from unittest.mock import Mock from pyats.topology", "10/100/1000mb Ethernet Rev. 3.4\" PID: WS-X6748-GE-TX , VID: V04, SN:", "def test_golden_c3850(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj =", "interface 28 Gigabit Ethernet interfaces 2 Ten Gigabit Ethernet interfaces", "def test_golden_active_opm(self): self.device = Mock(**self.golden_output_active_opm) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output =", "golden_output_8 = {'execute.return_value': ''' NAME: \"3825 chassis\", DESCR: \"3825 chassis\"", "\"vid\": \"V01\", } } }, } } }, \"WS-C6503-E-FAN 1\":", "= ShowProcessesCpuHistory(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff", "system experienced = 0 Standby failures = 0 Last switchover", "minutes System returned to ROM by power cycle at 21:57:23", "= obj.parse(slot='0') class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def test_golden(self): self.device = Mock(**self.golden_output_serdes_internal) obj", "Card 3 Rev. 
1.1\", \"name\": \"switching engine sub-module of 2\",", "5min | inc CPU CPU utilization for five seconds: 13%/0%;", "Oct 17 2018 18:57:18 +00:00 e1000_bia.txt 2142715904 bytes total (1989595136", "test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with", "(Read/Write) Configuration register is 0x0'''} golden_parsed_output_ios = { 'version': {'bootldr':", "'license_type': 'Permanent', 'main_mem': '262144', 'mem_size': {'flash-simulated non-volatile configuration': '512'}, 'next_reload_license_level':", "parsed_output = obj.parse(rp='active', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def", "\"0x0\", \"rom\": \"Bootstrap program is IOSv\", \"uptime\": \"1 day, 16", "Ethernet': '14', 'FastEthernet': '1', 'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet':", "\"WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6\" PID: WS-SUP720-3BXL", "= ShowPlatform(device=self.dev2) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden_c3850(self): self.maxDiff", "self.maxDiff = None self.dev_iosv = Mock(**self.device_output) version_obj = ShowVersion(device=self.dev_iosv) parsed_output", "PID: CLK-7600 , VID: , SN: FXS170802GL NAME: \"1\", DESCR:", "of memory. Processor board ID FDO2028F1WK Last reset from power-on", "'name': 'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR', 'sn': 'SPC1519005V', 'vid': 'V03 ', },", "iotype='ipm') self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_golden_active_opm(self): self.maxDiff = None self.device =", "redundancy_obj = ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = redundancy_obj.parse() def test_golden_iosv(self):", "W0) with 262144K bytes of memory. 
Processor board ID FDO2028F1WK", "Cisco 6500 4-slot Chassis System\" PID: WS-C6504-E , VID: V01,", "SIMM with four DSPs on Slot 0 SubSlot 4', 'pid':", "By using this product you agree to comply with applicable", "1400 watt 1\", \"pid\": \"PWR-1400-AC\", \"vid\": \"V01\", \"sn\": \"ABC0830J127\", }", "self.maxDiff = None self.device = Mock(**self.golden_output_active) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device)", "V05 , SN: FOC16050QP6 NAME: \"Two-Port Fast Ethernet High Speed", "Mock(**self.golden_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class", "}, }, } golden_output_6 = {'execute.return_value': ''' NAME: \"1\", DESCR:", "3', 'pid': 'HWIC-2FE', 'sn': 'FOC16062824', 'vid': 'V02 ', }, },", "10Gbase-SR Te2/4\", \"name\": \"Transceiver Te2/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T5\", \"vid\":", "SN: LIT03728KKK NAME: \"Switch 1 - FlexStackPlus Module\", DESCR: \"Stacking", "\"IOS\", \"version_short\": \"12.2\", \"platform\": \"s72033_rp\", \"version\": \"12.2(18)SXF7\", \"image_id\": \"s72033_rp-ADVENTERPRISEK9_WAN-M\", 'compiled_by':", "'ipbasek9', 'license_type': 'Permanent', 'next_reload_license_level': 'ipbasek9', }, 'security': { 'license_level': 'securityk9',", "days, 10 hours, 27 minutes System returned to ROM by", "Technology Technology-package Technology-package Current Type Next reboot ------------------------------------------------------------------ ipbase ipbasek9", "self.maxDiff = None self.device = Mock(**self.golden_output_9) obj = ShowInventory(device=self.device) parsed_output", "'next_reload_license_level': 'ipservices', 'chassis': 'WS-C3750X-24S', 'main_mem': '524288', 'processor_type': 'PowerPC405', 'rtr_type': 'WS-C3750X-24S',", "platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch_detail(test_show_switch_detail_iosxe): def test_empty(self): 
self.dev1 = Mock(**self.empty_output) platform_obj", "1: { \"invoked\": 3321960, \"usecs\": 109, \"tty\": 0, \"one_min_cpu\": 0.54,", "(revision 1.4) with 983008K/65536K bytes of memory. Processor board ID", "Configuration register = 0x0 Peer (slot: 0) information is not", "'0x2102', 'hostname': 'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'Reload", ", VID: V06 , SN: ONT170202T5 NAME: \"Transceiver Te2/5\", DESCR:", "\"Transceiver Te1/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T1\", \"vid\": \"V06 \", }", "number : FDO1633Q14S Top Assembly Part Number : 800-33746-04 Top", "}, '4': { 'PVDM2-64': { 'descr': 'PVDMII DSP SIMM with", "\"PS 2 PWR-2700-AC/4\", \"descr\": \"2700W AC power supply for CISCO7604", "2013 00:00:00 +00:00 config 267 -rw- 147988420 Mar 29 2017", "Number : 0x04 Switch Ports Model SW Version SW Image", "1\", \"pid\": \"WS-SUP720\", \"sn\": \"SAL11434N9G\", \"vid\": \"\", }, \"WS-F6K-PFC3BXL\": {", "Supervisor Engine 2T 10GE w/ CTS Rev. 1.5\", \"pid\": \"VS-SUP2T-10G\",", "Mock(**self.golden_output_subslot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(subslot='0/1') self.maxDiff = None", "'descr': '3825 chassis', 'name': '3825 chassis', 'pid': 'CISCO3825', 'sn': 'FTX7908A3RQ',", "for CISCO7604 2\" PID: PWR-2700-AC/4 , VID: V03, SN: APS17070093", "assembly number : 800-32727-03 Daughterboard serial number : FDO172217ED System", "3 minutes System returned to ROM by power-on System restarted", "SN: SAL17152EG9 NAME: \"WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module", "Inc. 
Compiled Thu 23-Nov-06 06:26 by kellythw Image text-base: 0x40101040,", "', }, }, '1/0/49': { 'GLC-SX-MMD': { 'descr': '1000BaseSX SFP',", ", SN: SPC1519005V NAME: \"2\", DESCR: \"WS-C3210X-48\" PID: WS-C3210X-48T-S ,", "22-Jan-18 04:07', 'compiled_by': 'prod_rel_team', 'rom': 'Bootstrap program is C3750E boot", "Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { 'main': {", "Supply 1', 'pid': 'PWR-3900-AC', 'sn': 'QCS1604P0BT', 'vid': 'V03 ', },", "FD5678Z90P NAME: \"Switch 2 - Power Supply 1\", DESCR: \"BCA", "\"WS-C6504-E\", \"vid\": \"V01\", \"sn\": \"FXS1712Q1R8\", } } }, \"slot\": {", "'chassis_sn': 'FGL161010K8', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Fri 05-Aug-11 00:32', 'curr_config_register': '0x2102',", "', }, }, }, 'vid': 'V02 ', }, }, },", "Clock FRU 2\" PID: CLK-7600 , VID: , SN: FXS181101V4", "\"Down\", \"hw_mode\": \"Simplex\", \"communications_reason\": \"Failure\", \"standby_failures\": \"0\" }, \"slot\": {", "Te2/4\", \"name\": \"Transceiver Te2/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T5\", \"vid\": \"V06", "'Virtual Ethernet': '2', 'Gigabit Ethernet': '28', 'FastEthernet': '1' }, 'os':", "'0': { 'lc': { 'CISCO3845-MB': { 'descr': 'c3845 Motherboard with", "VID: V02, SN: DCH183500KW NAME: \"PS 1 PWR-1400-AC\", DESCR: \"AC", "memory. 
Base ethernet MAC Address : AC:F2:C5:FF:55:E7 Motherboard assembly number", "TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\ TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\\ TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll", "self.device = Mock(**self.golden_output_active) obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='active', status='running')", "for Cisco 3900 ISR', 'name': 'Cisco Services Performance Engine 150", "1\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS1707008Y\", } } },", "Supervisor Engine 2T 10GE w/ CTS Rev. 1.5\" PID: VS-SUP2T-10G", "Last reset from s/w reset SuperLAT software (copyright 1990 by", "ShowSwitch(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff =", "loader', 'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE", "V02, SN: DCH183500KW NAME: \"PS 1 PWR-1400-AC\", DESCR: \"AC power", "Switch Ports Model SW Version SW Image ------ ----- -----", "self.assertRaises(KeyError): parsed_output = version_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv", "X2-10GB-SR , VID: V06 , SN: ONT17020338 NAME: \"Transceiver Te2/2\",", "= Mock(**self.golden_output_9) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9)", "test_show_processes_cpu_platform_iosxe,\\ TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\\ TestShowPlatformHardware as test_show_platform_hardware_iosxe,\\", "reload at 10:26:47 EST 
Mon Dec 9 2019 System restarted", "\"vid\": \"V01\", \"sn\": \"FXS1712Q1R8\", } } }, \"slot\": { \"CLK-7600", "} } }, }, } golden_output_2 = {'execute.return_value': ''' NAME:", "'descr': 'C3900 AC Power Supply 1', 'name': 'C3900 AC Power", "'3', }, 'os': 'IOS', 'platform': 'C3900', 'processor_board_flash': '2000880K', 'processor_type': 'C3900-SPE150/K9',", "9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2 = { \"main\": { \"chassis\": { \"WS-C6504-E\":", "\"chassis\": { \"WS-C6504-E\": { \"name\": \"WS-C6504-E\", \"descr\": \"Cisco Systems Cisco", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_standby) def test_golden_standby_offline(self): self.device = Mock(**self.golden_output_standby_offline)", "{ 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk", "{ 'C3KX-PWR-350WAC': { 'descr': 'ABC Power Supply', 'name': 'Switch 1", "}, }, }, }, } golden_output_8 = {'execute.return_value': ''' NAME:", ", SN: QCS1604P0BT '''} golden_parsed_output_5 = { 'main': { 'chassis':", "parity disabled. 256K bytes of non-volatile configuration memory. 
2097152K bytes", "'rp': { 'CISCO3825': { 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': {", "29-Mar-17 14:05', \"processor_type\": \"revision 1.0\", \"platform\": \"IOSv\", \"image_type\": \"production image\",", "interface daughtercard\" PID: EM-HDA-6FXO , VID: V03 , SN: FOC85389QXB", "AIM Element\" PID: AIM-VPN/SSL-3 , VID: V01, SN: FOC758693YO NAME:", "{ \"name\": \"WS-C6503-E-FAN 1\", \"descr\": \"Enhanced 3-slot Fan Tray 1\",", "\"platform\": \"IOSv\", \"image_type\": \"production image\", 'processor_board_flash': '10080K', 'returned_to_rom_by': 'reload', \"main_mem\":", "\"Switch 2 - Power Supply 1\", DESCR: \"BCA Power Supply\"", "Slot 2', 'pid': 'NM-16ESW', 'sn': 'FOC135464KO', 'subslot': { '0': {", "\"2\": { \"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"2\", \"descr\": \"WS-X6748-GE-TX", "boot loader', 'rtr_type': 'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21 PDT Mon", "SAL13516QS8 NAME: \"FAN-MOD-4HS 1\", DESCR: \"High Speed Fan Module for", "0 SubSlot 4', 'pid': 'PVDM2-64', 'sn': 'FOC63358WSI', 'vid': 'V01 ',", "ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='opm') self.assertEqual(parsed_output, self.golden_parsed_output_active_opm)", "'vid': 'V02 ', }, }, }, }, } golden_output_5 =", "platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_c3850) def test_golden_asr1k(self): self.maxDiff = None self.dev_asr1k =", "Ethernet/IEEE 802.3\": \"50\", 'Virtual Ethernet/IEEE 802.3': '1' }, \"mem_size\": {\"non-volatile", "Motherboard with Gigabit Ethernet on Slot 0', 'pid': 'CISCO3845-MB', 'sn':", "'license_type': 'Permanent', 'next_reload_license_level': 'securityk9', }, 'uc': { 'license_level': 'None', 'license_type':", "NAME: \"Transceiver Te2/3\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/3\" PID: X2-10GB-SR", "2013 00:00:00 +00:00\", \"index\": \"1\", \"size\": \"0\", \"permissions\": \"drw-\" },", 
"'FOC85389QXB', 'vid': 'V03 ', }, }, }, 'vid': 'V04 ',", "program is IOSv\", \"uptime\": \"1 day, 16 hours, 42 minutes\"", "self.parsed_output) class test_dir(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output", "\"name\": \"Transceiver Te2/5\", \"pid\": \"X2-10GB-SR\", \"sn\": \"AGA1515XZE2\", \"vid\": \"V05 \",", "compliance with U.S. and local country laws. By using this", "Mock(**self.golden_output_iosv) dir_obj = Dir(device=self.dev_iosv) parsed_output = dir_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class", "{ \"last_modified_date\": \"Oct 17 2018 18:57:10 +00:00\", \"index\": \"268\", \"size\":", "email to <EMAIL>. Cisco IOSv (revision 1.0) with with 435457K/87040K", "\"vid\": \"V01\", \"sn\": \"DCH170900PF\", } } }, \"PS 1 PWR-2700-AC/4\":", "supply for CISCO7604 2\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS17070093\",", "\"System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)\", \"bootldr\": \"s72033_rp Software", "\"ACTIVE\" } } } golden_output_iosv = {'execute.return_value': '''\\ Redundant System", "module for EtherSwitch NM on Slot 2 SubSlot 0', 'pid':", "9 2019 System image file is \"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\" Last reload type:", "}, }, }, '2': { 'rp': { 'WS-C3210X-48T-S': { 'descr':", "ShowPlatformPower(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff =", "Mock(**self.empty_output) redundancy_obj = ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = redundancy_obj.parse() def", "\"platform\": \"s72033_rp\", \"version\": \"12.2(18)SXF7\", \"image_id\": \"s72033_rp-ADVENTERPRISEK9_WAN-M\", 'compiled_by': 'kellythw', 'compiled_date': 'Thu", "Loader (C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE SOFTWARE (fc1) R5 uptime is", "ONT170202UU NAME: \"Transceiver Te2/4\", DESCR: \"X2 Transceiver 10Gbase-SR 
Te2/4\" PID:", "= obj.parse(port='0/0/0') class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def test_golden_active_opm(self): self.device = Mock(**self.golden_output_active_opm) obj", "Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you", "obj.parse(slot='0') self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_serdes_internal) def test_empty(self): self.device1", "1.8\", \"name\": \"switching engine sub-module of 1\", \"pid\": \"WS-F6K-PFC3BXL\", \"sn\":", "parsed_output = platform_obj.parse(status='active') def test_golden_active(self): self.maxDiff = None self.device =", "SubSlot 0\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk -", "} device_output = {'execute.return_value':''' best-c3945-IOS3#show version Cisco IOS Software, C3900", "Cisco CISCO3945-CHASSIS (revision 1.1) with C3900-SPE150/K9 with 2027520K/69632K bytes of", "\"V01\", \"sn\": \"SAL17152EG9\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-A\": { \"descr\":", "\"hostname\": \"cat6k_tb1\", \"uptime\": \"10 weeks, 5 days, 5 hours, 16", "Mock(**self.golden_output_asr1k) platform_obj = ShowPlatform(device=self.dev_asr1k) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_asr1k) class", "Mock(**self.golden_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class", "NAME: \"3825 chassis\", DESCR: \"3825 chassis\" PID: CISCO3825 , VID:", "'sn': 'FOC758693YO', 'vid': 'V01', }, }, }, '1': { 'lc':", "{ 'FastEthernet': '2', 'Gigabit Ethernet': '3', }, 'os': 'IOS', 'platform':", "Last reset from power-on 14 Virtual Ethernet interfaces 1 FastEthernet", "Current Type Next reboot ------------------------------------------------------------------ ipbase ipbasek9 Permanent ipbasek9 security", "\"0 minutes\", 
\"communications\": \"Down\", \"hw_mode\": \"Simplex\", \"communications_reason\": \"Failure\", \"standby_failures\": \"0\"", "self.golden_parsed_output_8) def test_golden_output_9(self): self.maxDiff = None self.device = Mock(**self.golden_output_9) obj", "self.maxDiff = None self.device = Mock(**self.golden_output_active_ipm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device)", "AC:F2:C5:FF:55:E7 Motherboard assembly number : 73-13061-04 Motherboard serial number :", "\"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS170802GL\", } } }, \"FAN-MOD-4HS 1\":", "self.golden_parsed_output_5) def test_golden_output_6(self): self.maxDiff = None self.device = Mock(**self.golden_output_6) obj", "self.dev_iosv = Mock(**self.golden_output_ios) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output,", "6500 3-slot Chassis System\" PID: WS-C6503-E , VID: V03, SN:", "\"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/2\", \"name\": \"Transceiver Te2/2\",", "Distributed Forwarding Card 4 EARL sub-module of 3\", DESCR: \"WS-F6K-DFC4-A", "'16 Port 10BaseT/100BaseTX EtherSwitch', 'name': '16 Port 10BaseT/100BaseTX EtherSwitch on", "\"vid\": \"V04\", } } }, } } }, \"4\": {", "'ipservices', 'number_of_intfs': {'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2', 'Virtual", "\"main_mem\": \"435457\", \"mem_size\": { \"non-volatile configuration\": \"256\" }, \"system_image\": \"flash0:/vios-adventerprisek9-m\",", "NAME: \"TenGigabitEthernet2/1/1\", DESCR: \"SFP-10GBase-LR\" PID: SFP-10G-LR , VID: V02 ,", "0x2102 '''} golden_output_ios_1 = {'execute.return_value': '''\\ Cisco IOS Software, C3750E", "SN: SAL171846RF NAME: \"Transceiver Te2/1\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/1\"", "\"vid\": \"\", \"sn\": \"FXS181101V4\", } } }, \"CLK-7600 2\": {", "platform_obj = ShowSwitch(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def 
test_golden(self):", "\"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0\" PID: WS-F6K-DFC4-A ,", "\"size\": \"0\", \"permissions\": \"drw-\" }, \"nvram\": { \"last_modified_date\": \"Oct 17", "golden_parsed_output_2 = { \"main\": { \"chassis\": { \"WS-C6504-E\": { \"name\":", "Rev. 2.6\", \"pid\": \"WS-X6748-GE-TX\", \"vid\": \"V02\", \"sn\": \"SAL1128UPQ9\", \"subslot\": {", "TestShowVersionRp as test_show_version_rp_iosxe,\\ TestShowProcessesCpu as test_show_processes_cpu_iosxe,\\ TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\\ TestShowProcessesCpuPlatform", "}, \"nvram\": { \"last_modified_date\": \"Oct 17 2018 18:57:10 +00:00\", \"index\":", "platform_obj.parse( status='active', slot='0', iotype='ipm') self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_golden_active_opm(self): self.maxDiff =", "\"sn\": \"ONT1702033D\", \"vid\": \"V06 \", } }, }, } }", "ATA System CompactFlash 0 (Read/Write) 0K bytes of ATA CompactFlash", "84:3D:C6:FF:F1:B8 Motherboard assembly number : 73-15476-04 Motherboard serial number :", "\"WS-X6816-10GE\", \"vid\": \"V02\", \"sn\": \"SAL17152QB3\", \"subslot\": { \"0\": { \"WS-F6K-DFC4-E\":", "ShowProcessesCpuPlatform(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = cpu_platform_obj.parse() class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def test_empty(self):", "Gigabit Ethernet/IEEE 802.3 interfaces 1917K bytes of non-volatile configuration memory.", "'*0': { 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6' } } }, 'license_package':", "'High Density Voice Module - 8FXS/DID on Slot 1', 'pid':", "PID: WS-SUP720 , VID: , SN: SAL11434N9G NAME: \"switching engine", "= { 'main': { 'chassis': { 'IOSv': { 'descr': 'IOSv", "= None self.device = Mock(**self.golden_output_7) obj = ShowInventory(device=self.device) parsed_output =", "cycle', \"rtr_type\": \"WS-C6503-E\", 
\"chassis_sn\": \"FXS1821Q2H9\", \"last_reload_reason\": \"s/w reset\", 'processor_board_flash': '65536K',", "\"ONT170202T5\", \"vid\": \"V06 \", } }, \"5\": { \"X2-10GB-SR\": {", "\"0x2102\", } } golden_output_ios_cat6k = {'execute.return_value': ''' show version Cisco", "'FastEthernet': '1', 'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2' },", "\"2821 chassis\" PID: CISCO2821 , VID: V07 , SN: FTX1234AMWT", "1 - Power Supply 1', 'pid': 'PWR-C2-2929WAC', 'sn': 'LIT03728KKK', 'vid':", "Slot 0 SubSlot 4\", DESCR: \"PVDMII DSP SIMM with four", "{ 'descr': 'Encryption AIM Element', 'name': 'Virtual Private Network (VPN)", "WS-C3750X-24P (PowerPC405) processor (revision W0) with 262144K bytes of memory.", "best-c3945-IOS3 uptime is 1 hour, 20 minutes System returned to", ", VID: V01, SN: FOC758693YO NAME: \"Clear/Subrate T3/E3 WAN on", "for EtherSwitch NM', 'name': 'Gigabit(1000BaseT) module for EtherSwitch NM on", "\"X2 Transceiver 10Gbase-SR Te2/5\" PID: X2-10GB-SR , VID: V05 ,", "ROM: System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1) BOOTLDR: s72033_rp", "Reason: Failure Current Processor Information : ------------------------------- Active Location =", "\"X2 Transceiver 10Gbase-SR Te2/2\" PID: X2-10GB-SR , VID: V06 ,", "configuration memory. 8192K bytes of packet buffer memory. 
65536K bytes", "obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None", "obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5) def test_golden_output_6(self):", "def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) version_obj =", "FXS1712Q1R8 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600 Clock FRU 1\" PID:", "\"256\" }, \"system_image\": \"flash0:/vios-adventerprisek9-m\", \"curr_config_register\": \"0x0\", \"rom\": \"Bootstrap program is", "'number_of_intfs': {'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2', 'Virtual Ethernet':", "{ 'Virtual Ethernet': '14', 'FastEthernet': '1', 'Gigabit Ethernet': '28', 'Ten", "Te1/4\" PID: X2-10GB-SR , VID: V06 , SN: ONT170202T1 NAME:", "1 PWR-1400-AC\": { \"other\": { \"PS 1 PWR-1400-AC\": { \"name\":", "'sn': 'FOC16050QP6' } } }, 'license_package': { 'data': { 'license_level':", "', }, }, }, }, }, }, '1': { 'other':", "subject to United States and local country laws governing import,", "as test_show_processes_cpu_iosxe,\\ TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\\ TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\\ TestShowPlatformSoftwareStatusControlProcessorBrief as", "ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\ ShowPlatformHardwareQfpStatisticsDrop,\\ ShowEnvironment,\\ ShowModule,\\ ShowSwitch, ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform", "self.maxDiff = None self.device = Mock(**self.golden_output_4) obj = ShowInventory(device=self.device) parsed_output", "\"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS17070093\", } } }, \"1\":", "\"1\", \"descr\": \"WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 
5.6\",", "{ \"e1000_bia.txt\": { \"last_modified_date\": \"Oct 17 2018 18:57:18 +00:00\", \"index\":", "= None self.device = Mock(**self.golden_output_active_ipm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output", "'number_of_intfs': { 'Virtual Ethernet': '14', 'FastEthernet': '1', 'Gigabit Ethernet': '28',", "\"VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1\", \"pid\":", "\"drw-\" }, \"nvram\": { \"last_modified_date\": \"Oct 17 2018 18:57:10 +00:00\",", "interfaces The password-recovery mechanism is enabled. 512K bytes of flash-simulated", "0x0 Peer (slot: 0) information is not available because it", "}, \"5\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/5\",", "\"descr\": \"WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0\", \"pid\":", "parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): def test_golden_serdes(self): self.device =", "register = 0x0 Peer (slot: 0) information is not available", "'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E', 'vid': 'V01 ', }, }, },", "{ 'lc': { 'SM-ES2-16-P': { 'descr': 'SM-ES2-16-P', 'name': '1', 'pid':", "1\", \"descr\": \"Enhanced 3-slot Fan Tray 1\", \"pid\": \"WS-C6503-E-FAN\", \"vid\":", "Permanent datak9 Configuration register is 0x2102 '''} parsed_output = {", "'pid': 'CISCO3845-MB', 'sn': 'FOC729346GQ', 'vid': 'V09 ', }, }, 'other':", "8FXS/DID on Slot 1\", DESCR: \"High Density Voice Module -", "\"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4\" PID: WS-X6748-GE-TX", "Systems Cisco 6500 4-slot Chassis System\" PID: WS-C6504-E , VID:", "10 weeks, 5 days, 5 hours, 16 minutes Time since", "'compiled_by': 'kellythw', 'compiled_date': 'Thu 23-Nov-06 06:26', \"image_type\": \"production image\", \"rom\":", "8192K bytes of packet buffer memory. 
65536K bytes of Flash", "1.2\" PID: WS-F6K-DFC4-E , VID: V02, SN: SAL171846RF NAME: \"Transceiver", "U (2091, 3086)\" PID: WIC-1B-U-V2 , VID: V01, SN: 10293847", "\"number_of_intfs\": { \"Gigabit Ethernet/IEEE 802.3\": \"50\", 'Virtual Ethernet/IEEE 802.3': '1'", "reason: Unknown reason This product contains cryptographic features and is", "{ \"rp\": { \"VS-SUP2T-10G\": { \"name\": \"1\", \"descr\": \"VS-SUP2T-10G 5", "assistance please contact us by sending email to <EMAIL>. License", "} }, \"WS-C6503-E-FAN 1\": { \"other\": { \"WS-C6503-E-FAN 1\": {", "Port 10BaseT/100BaseTX EtherSwitch\" PID: NM-16ESW , VID: V01 , SN:", "\"\", \"sn\": \"FXS170802GL\", } } }, \"FAN-MOD-4HS 1\": { \"other\":", "\"pid\": \"WS-SUP720-3BXL\", \"vid\": \"V05\", \"sn\": \"SAL11434P2C\", \"subslot\": { \"0\": {", "DESCR: \"High Density Voice Module - 8FXS/DID\" PID: EVM-HD-8FXS/DID ,", "non-volatile configuration memory. 2097152K bytes of ATA System CompactFlash 0", "parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev = Device(name='c3850')", "Configuration register is 0xF '''} golden_parsed_output_ios_cat6k = { \"version\": {", "\"WS-SUP720-3BXL\", \"vid\": \"V05\", \"sn\": \"SAL11434P2C\", \"subslot\": { \"0\": { \"WS-SUP720\":", "self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev = Device(name='c3850') empty_output = {'execute.return_value':", "09:56 by prod_rel_team Image text-base: 0x00003000, data-base: 0x02800000 ROM: Bootstrap", "'PVDM2-64': { 'descr': 'PVDMII DSP SIMM with four DSPs', 'name':", "\"other\": { \"PS 2 PWR-2700-AC/4\": { \"name\": \"PS 2 PWR-2700-AC/4\",", "{ 'C1010X-STACK': { 'descr': 'Stacking Module', 'name': 'Switch 1 -", "1.03% 0.54% 0.48% 0 PIM Process 84 3582279 1466728 2442", "{'execute.return_value': '''\\ ROM: Bootstrap program is IOSv '''} golden_parsed_output_iosv 
=", "= None self.dev = Mock(**self.golden_output) obj = ShowEnvironment(device=self.dev) parsed_output =", "register is 0x2012 Standby not ready to show bootvar '''}", "is 0xF '''} golden_parsed_output_ios_cat6k = { \"version\": { \"os\": \"IOS\",", "- 8FXS/DID\" PID: EVM-HD-8FXS/DID , VID: V04 , SN: FOC65798TG8", "processor (revision A0) with 524288K bytes of memory. Processor board", "\"IOSv\", DESCR: \"IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0\"", "\"last_modified_date\": \"Oct 14 2013 00:00:00 +00:00\", \"index\": \"264\", \"size\": \"0\",", "2T 10GE w/ CTS Rev. 1.5\" PID: VS-SUP2T-10G , VID:", "}, }, }, } def test_empty(self): self.dev1 = Mock(**self.empty_output) inventory_obj", "test_golden_c3850(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowPlatform(device=self.dev_c3850)", "\"1\", DESCR: \"WS-C8888X-88\" PID: WS-C0123X-45T-S , VID: V00 , SN:", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1) class test_show_processes_cpu(test_show_processes_cpu_iosxe): def test_golden(self): self.device =", "4%/0%; one minute: 4%; five minutes: 9% PID Runtime(ms) Invoked", "VID: V03, SN: FXS1821Q2H9 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600 Clock", "test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformPower(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output", "FAN-MOD-4HS , VID: V01, SN: DCH170900PF NAME: \"PS 1 PWR-2700-AC/4\",", "for five seconds: 4%/0%; one minute: 4%; five minutes: 9%", "FDO202823P8 System serial number : FDO2028F1WK Top Assembly Part Number", "\"V05\", \"sn\": \"SAL17152N0F\", \"subslot\": { \"0\": { \"VS-F6K-MSFC5\": { \"descr\":", "\"Oct 14 2013 00:00:00 +00:00\", \"index\": \"264\", \"size\": \"0\", \"permissions\":", ", VID: V01, SN: ABC0830J127 '''} golden_output_4 = {'execute.return_value': '''", "as test_show_processes_cpu_platform_iosxe,\\ 
TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\\ TestShowPlatformHardware as", "'main_mem': '262144', 'mem_size': {'flash-simulated non-volatile configuration': '512'}, 'next_reload_license_level': 'ipservices', 'number_of_intfs':", "= Mock(**self.golden_output_active) obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='active', status='running') self.maxDiff", "us by sending email to <EMAIL>. Cisco IOSv (revision 1.0)", "2.77, \"runtime\": 3582279, \"pid\": 84, \"five_sec_cpu\": 0.55 }, 3: {", "'rp': { 'WS-C3210X-48T-S': { 'descr': 'WS-C3210X-48', 'name': '2', 'pid': 'WS-C3210X-48T-S',", "restarted at 12:22:21 PDT Mon Sep 10 2018 System image", "(VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright", "\"sn\": \"ONT170201TT\", \"vid\": \"V06 \", } }, }, } }", "'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6' } } }, 'license_package': { 'data':", "Type: Permanent Next reload license Level: ipservices cisco WS-C3750X-24P (PowerPC405)", "High Speed WAN Interface Card on Slot 0 SubSlot 3',", "= platform_obj.parse(status='active') def test_golden_active(self): self.maxDiff = None self.device = Mock(**self.golden_output_active)", "prod_rel_team Image text-base: 0x00003000, data-base: 0x02800000 ROM: Bootstrap program is", "None self.dev_iosv = Mock(**self.device_output) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse()", "\"process\": \"PIM Process\", \"five_min_cpu\": 0.48, \"runtime\": 362874, \"pid\": 368, \"five_sec_cpu\":", "Permanent ipbasek9 security securityk9 Permanent securityk9 uc None None None", "DSPs\" PID: PVDM2-64 , VID: V01 , SN: FOC63358WSI NAME:", "{'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2', 'Virtual Ethernet': '2',", "} } }, \"1\": { 
\"rp\": { \"WS-SUP720-3BXL\": { \"name\":", "ATA CompactFlash 1 (Read/Write) 0K bytes of ATA CompactFlash 2", "local country laws governing import, export, transfer and use. Delivery", "of packet buffer memory. 65536K bytes of Flash internal SIMM", "'descr': 'Encryption AIM Element', 'name': 'Virtual Private Network (VPN) Module", "features and is subject to United States and local country", "'2', 'Gigabit Ethernet': '28', 'FastEthernet': '1' }, 'os': 'IOS', 'platform':", "V03, SN: SAL17163901 NAME: \"Transceiver Te1/4\", DESCR: \"X2 Transceiver 10Gbase-SR", "Distributed Forwarding Card 3 Rev. 1.1\", \"name\": \"switching engine sub-module", "w/ CTS Rev. 1.5\", \"pid\": \"VS-SUP2T-10G\", \"vid\": \"V05\", \"sn\": \"SAL17152N0F\",", "'2821 chassis', 'pid': 'CISCO2821', 'sn': 'FTX1234AMWT', 'vid': 'V07 ', },", "Feature Card 4 EARL sub-module of 1\", \"pid\": \"VS-F6K-PFC4\", \"sn\":", "Rev. 4.1\" PID: WS-F6700-CFC , VID: V06, SN: SAL13516QS8 NAME:", "NAME: \"2\", DESCR: \"WS-X6816-10GE CEF720 16 port 10GE Rev. 
2.0\"", "'9 weeks, 4 days, 2 hours, 3 minutes', 'version': '12.2(55)SE8',", "V01, SN: ABC0830J127 '''} golden_output_4 = {'execute.return_value': ''' NAME: \"1\",", "pyats.topology import Device from genie.metaparser.util.exceptions import SchemaEmptyParserError,\\ SchemaMissingKeyError from genie.libs.parser.ios.show_platform", "test_empty(self): self.dev1 = Mock(**self.empty_output) inventory_obj = ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "SN: ONT182746GZ NAME: \"1\", DESCR: \"WS-C1010XR-48FPS-I\" PID: WS-C1010XR-48FPS-I, VID: V05", "MAC Address : AC:F2:C5:FF:55:E7 Motherboard assembly number : 73-13061-04 Motherboard", "\"WS-F6K-DFC4-E\", \"sn\": \"SAL171846RF\", \"vid\": \"V02\", } }, \"1\": { \"X2-10GB-SR\":", "PWR-2700-AC/4\": { \"other\": { \"PS 1 PWR-2700-AC/4\": { \"name\": \"PS", "Power Supply 1\", DESCR: \"BCA Power Supply\" PID: C3KX-PWR-007CBA ,", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6) def test_golden_output_7(self): self.maxDiff = None", "Mock(**self.golden_output_ios_1) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def", "0\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1\"", "NAME: \"Transceiver Te2/1\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/1\" PID: X2-10GB-SR", "\"WS-C6504-E\", \"descr\": \"Cisco Systems Cisco 6500 4-slot Chassis System\", \"pid\":", "\"WS-C6503-E\": { \"name\": \"WS-C6503-E\", \"descr\": \"Cisco Systems Catalyst 6500 3-slot", "buffer memory. 
65536K bytes of Flash internal SIMM (Sector size", "test_show_platform_power(test_show_platform_power_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformPower(device=self.device) with", "{ 'CISCO3825': { 'descr': '3825 chassis', 'name': '3825 chassis', 'pid':", "1\", DESCR: \"High Density Voice Module - 8FXS/DID\" PID: EVM-HD-8FXS/DID", "\"VS-F6K-MSFC5 CPU Daughterboard Rev. 2.0\" PID: VS-F6K-MSFC5 , VID: ,", "= ShowProcessesCpuSorted(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff", "license Level: ipservices cisco WS-C3750X-24S (PowerPC405) processor (revision A0) with", "Supply', 'name': 'Switch 1 - Power Supply 1', 'pid': 'C3KX-PWR-350WAC',", "PID: WS-F6700-DFC3CXL , VID: V01, SN: SAL1214LAG5 NAME: \"WS-C6503-E-FAN 1\",", "'curr_config_register': '0xF' } } device_output = {'execute.return_value':''' best-c3945-IOS3#show version Cisco", "seconds: 13%/0%; one minute: 23%; five minutes: 15% '''} golden_parsed_output_1", "\"CLK-7600 2\": { \"other\": { \"CLK-7600 2\": { \"name\": \"CLK-7600", "}, \"boot\": { \"last_modified_date\": \"Jan 30 2013 00:00:00 +00:00\", \"index\":", "Te1/4\", \"name\": \"Transceiver Te1/4\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T1\", \"vid\": \"V06", "\"bootldr\": \"s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)\", \"hostname\":", "= Mock(**self.empty_output) platform_obj = ShowProcessesCpuHistory(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse()", "\"sn\": \"SAL11434P2C\", \"subslot\": { \"0\": { \"WS-SUP720\": { \"descr\": \"WS-SUP720", "\"Mar 29 2017 00:00:00 +00:00\", \"index\": \"267\", \"size\": \"147988420\", \"permissions\":", "Slot 0 SubSlot 1\", DESCR: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex", "}, } golden_output_3 = {'execute.return_value': ''' # show inventory NAME:", "V02, SN: 
SAL171846RF NAME: \"Transceiver Te2/1\", DESCR: \"X2 Transceiver 10Gbase-SR", "use. Delivery of Cisco cryptographic products does not imply third-party", "72 bits wide with parity enabled. 255K bytes of non-volatile", "self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) platform_obj = ShowBootvar(device=self.dev_iosv) parsed_output", "License Info: License UDI: ------------------------------------------------- Device# PID SN ------------------------------------------------- *0", "CLEI Code Number : CMMFF00ARC Hardware Board Revision Number :", "'', }, }, }, }, } golden_output_7 = {'execute.return_value': '''", "'returned_to_rom_by': 'reload', \"main_mem\": \"435457\", \"mem_size\": { \"non-volatile configuration\": \"256\" },", "= obj.parse() class test_show_version_rp(test_show_version_rp_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj", "CEF720 16 port 10GE Rev. 2.0\" PID: WS-X6816-10GE , VID:", "= obj.parse() def test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output)", "2.0\" PID: VS-F6K-PFC4 , VID: V03, SN: SAL17163901 NAME: \"Transceiver", "\"version_short\": \"12.2\", \"platform\": \"s72033_rp\", \"version\": \"12.2(18)SXF7\", \"image_id\": \"s72033_rp-ADVENTERPRISEK9_WAN-M\", 'compiled_by': 'kellythw',", "= version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios) def test_golden_ios_cat6k(self): self.maxDiff = None self.dev_iosv", "reload reason: Unknown reason This product contains cryptographic features and", "chassis', 'pid': 'CISCO3825', 'sn': 'FTX7908A3RQ', 'vid': 'V05 ', }, },", "(fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2017 by Cisco Systems,", "'sn': 'FOC09876NP3', 'vid': '', }, }, }, }, } golden_output_7", "368 362874 3321960 109 1.03% 0.54% 0.48% 0 PIM Process", "VID: V02 , SN: FD232323XXZ NAME: \"GigabitEthernet1/0/49\", DESCR: \"1000BaseSX SFP\"", "\"V06\", } } }, } } }, }, } golden_output_2", 
"golden_parsed_output_1 = { \"sort\": { 1: { \"invoked\": 3321960, \"usecs\":", "OSPF-1 Hello '''} def test_empty(self): self.dev = Mock(**self.empty_output) obj =", "(C3750X-HBOOT-M) Version ' '15.2(3r)E, RELEASE SOFTWARE (fc1)', 'chassis': 'WS-C3750X-24P', 'chassis_sn':", "products does not imply third-party authority to import, export, distribute", "Mock(**self.golden_output_2) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2) def", "on) System image file is \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\" This product contains cryptographic", "ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='standby', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_standby)", "parsed_output = obj.parse(rp='standby', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_standby) def", "12.2(55)SE8, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2013", "for CISCO7604 1\", \"pid\": \"PWR-2700-AC/4\", \"vid\": \"V03\", \"sn\": \"APS1707008Y\", }", "weeks, 4 days, 2 hours, 3 minutes System returned to", "'1': { 'other': { 'EVM-HD-8FXS/DID': { 'descr': 'High Density Voice", "self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_golden_standby(self): self.device = Mock(**self.golden_output_standby) obj = ShowVersionRp(device=self.device)", "----- ---------- ---------- * 1 30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration", "ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5\", \"pid\":", "(c) 1986-2011 by Cisco Systems, Inc. 
Compiled Fri 05-Aug-11 00:32", "Mon Dec 9 2019', 'returned_to_rom_by': 'reload', 'rom': 'System Bootstrap, Version", "\"OSR-7600 Clock FRU 1\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\",", "{ \"slot 0\": { \"image_ver\": \"Cisco IOS Software, IOSv Software", "\"switching engine sub-module of 2\", DESCR: \"WS-F6700-DFC3CXL Distributed Forwarding Card", "'WS-C3210X-48T-S': { 'descr': 'WS-C3210X-48', 'name': '2', 'pid': 'WS-C3210X-48T-S', 'sn': 'FD5678Z90P',", "\"Encryption AIM Element\" PID: AIM-VPN/SSL-2 , VID: V01, SN: FOC2837465E", "SAL171846RF NAME: \"Transceiver Te2/1\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/1\" PID:", "'V07 ', }, }, }, 'slot': { '0': { 'other':", "WAN', 'name': 'Clear/Subrate T3/E3 WAN on Slot 1', 'pid': 'NM-1T3/E3=',", "'ONT182746GZ', 'vid': 'V02 ', }, }, }, 'vid': 'V02 ',", "minute: 4%; five minutes: 9% PID Runtime(ms) Invoked uSecs 5Sec", "}, }, '1/1/1': { 'SFP-10G-SR': { 'descr': 'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1',", "'SFP-10G-SR', 'sn': 'SPC1519005V', 'vid': 'V03 ', }, }, }, 'vid':", "\"WS-F6K-DFC4-A\", \"sn\": \"SAL171848KL\", \"vid\": \"V04\", } } }, } }", "ONT17020338 NAME: \"Transceiver Te2/2\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/2\" PID:", "'hostname': 'R5', 'image_id': 'C3750E-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'power-on', 'license_level':", "slot 0 Current Software state = ACTIVE Uptime in current", "= ShowPlatformHardware(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def", "} }, } } }, \"2\": { \"lc\": { \"WS-X6748-GE-TX\":", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1) class test_show_processes_cpu(test_show_processes_cpu_iosxe): def test_golden(self): self.device = Mock(**self.golden_output)", "RELEASE SOFTWARE (fc1) BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 
12.2(18)SXF7, RELEASE", "\"vid\": \"V05\", \"sn\": \"SAL11434P2C\", \"subslot\": { \"0\": { \"WS-SUP720\": {", "\"descr\": \"WS-SUP720 MSFC3 Daughterboard Rev. 3.1\", \"name\": \"msfc sub-module of", "SN: FXS181101V4 NAME: \"1\", DESCR: \"WS-SUP720-3BXL 2 ports Supervisor Engine", "1 30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''}", "\"X2-10GB-SR\", \"sn\": \"ONT1702020H\", \"vid\": \"V06 \", } }, \"3\": {", "test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev)", "2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'next_reload_license_level':", "Systems, Inc. Compiled Mon 22-Jan-18 04:07 by prod_rel_team ROM: Bootstrap", "\"Transceiver Te2/6\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/6\" PID: X2-10GB-SR ,", "2010 (SP by power on) System image file is \"disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7\"", "{ \"lc\": { \"WS-X6816-10GE\": { \"name\": \"2\", \"descr\": \"WS-X6816-10GE CEF720", "\"X2 Transceiver 10Gbase-SR Te2/3\" PID: X2-10GB-SR , VID: V06 ,", "}, }, '2': { 'rp': { 'WS-C3210X-48T-S': { 'descr': 'WS-C3210X-48',", "number : FDO202823P8 System serial number : FDO2028F1WK Top Assembly", "15.2(2)E8, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2018", "ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\\ TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\\ TestShowEnv as test_show_env_iosxe,\\ TestShowModule", "*0 C3900-SPE150/K9 FOC16050QP6 Technology Package License Information for Module:'c3900' -----------------------------------------------------------------", "WAN on Slot 1\", DESCR: \"Clear/Subrate T3/E3 WAN\" PID: NM-1T3/E3=", "by prod_rel_team ROM: Bootstrap program is IOSv N95_1 uptime is", 
"\"permissions\": \"drw-\" }, \"vios-adventerprisek9-m\": { \"last_modified_date\": \"Mar 29 2017 00:00:00", "Slot 1', 'pid': 'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8', 'subslot': { '1': {", "\"descr\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0\", \"name\": \"WS-F6K-DFC4-A", "\"runtime\": 362874, \"pid\": 368, \"five_sec_cpu\": 1.03 }, 2: { \"invoked\":", "VID: V01 , SN: FOC63358WSI NAME: \"High Density Voice Module", "'PowerPC405', 'returned_to_rom_by': 'power-on', 'rom': 'Bootstrap program is C3750E boot loader',", "test_show_processes_cpu_iosxe,\\ TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\\ TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\\ TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\\", "VID: V06 , SN: ONT170202T1 NAME: \"Transceiver Te1/5\", DESCR: \"X2", "def test_golden_active_opm(self): self.maxDiff = None self.device = Mock(**self.golden_output_active_opm) platform_obj =", "\"permissions\": \"-rw-\" } }, \"bytes_total\": \"2142715904\", \"bytes_free\": \"1989595136\" }, \"dir\":", "minutes Time since cat6k_tb1 switched to active is 10 weeks,", "day, 16 hours, 42 minutes\" } } golden_output_iosv = {'execute.return_value':", "2', 'pid': 'NM-16ESW', 'sn': 'FOC135464KO', 'subslot': { '0': { 'GE-DCARD-ESW':", ": V07 CLEI Code Number : CMMPP00DRB Hardware Board Revision", "ABC0830J127 '''} golden_output_4 = {'execute.return_value': ''' NAME: \"1\", DESCR: \"WS-C8888X-88\"", "parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def test_golden(self): self.device", "C3750E boot loader', 'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE,", "= {'execute.return_value': '''\\ NAME: \"IOSv\", DESCR: \"IOSv chassis, Hw Serial#:", "{ 'license_level': 'None', 'license_type': 'None', 
'next_reload_license_level': 'None', }, }, 'main_mem':", "Mon Sep 10 2018', 'uptime': '9 weeks, 4 days, 2", "sub-module of 2\", \"pid\": \"WS-F6700-DFC3CXL\", \"sn\": \"SAL1214LAG5\", \"vid\": \"V01\", }", "IOSv '''} golden_parsed_output_iosv = { \"version\": { \"last_reload_reason\": \"Unknown reason\",", "ShowPlatformHardwareQfpBqsOpmMapping,\\ ShowPlatformHardwareQfpBqsIpmMapping,\\ ShowPlatformHardwareSerdes,\\ ShowPlatformHardwareSerdesInternal,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\ ShowPlatformHardwareQfpStatisticsDrop,\\ ShowEnvironment,\\ ShowModule,\\ ShowSwitch,", "'1': { 'lc': { 'SM-ES2-16-P': { 'descr': 'SM-ES2-16-P', 'name': '1',", "3-slot Fan Tray 1\" PID: WS-C6503-E-FAN , VID: V02, SN:", "weeks, 3 days, 10 hours, 27 minutes System returned to", "are unable to comply with U.S. and local laws, return", ": WS-C3750X-24S-E Daughterboard assembly number : 800-32727-03 Daughterboard serial number", "{ '1': { 'rp': { 'WS-C0123X-45T-S': { 'descr': 'WS-C8888X-88', 'name':", "'FOC758693YO', 'vid': 'V01', }, }, }, '1': { 'lc': {", "of 1\", \"pid\": \"VS-F6K-MSFC5\", \"sn\": \"SAL17142D06\", \"vid\": \"\", }, \"VS-F6K-PFC4\":", "self.device = Mock(**self.golden_output_9) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,", "(fc1)\", \"bootldr\": \"s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)\",", "import TestShowPlatform as test_show_platform_iosxe,\\ TestShowPlatformPower as test_show_platform_power_iosxe,\\ TestShowVersionRp as test_show_version_rp_iosxe,\\", "SN: FOC91864MNN '''} golden_parsed_output_9 = { 'main': { 'chassis': {", "'rp': { 'WS-C0123X-45T-S': { 'descr': 'WS-C8888X-88', 'name': '1', 'pid': 'WS-C0123X-45T-S',", "Rev. 
5.6\" PID: WS-SUP720-3BXL , VID: V05, SN: SAL11434P2C NAME:", "test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\ TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\ TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\\ TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\\", "NAME: \"PS 1 PWR-1400-AC\", DESCR: \"AC power supply, 1400 watt", "'sn': 'FGL161010K8', 'vid': 'V05 ', }, }, }, 'slot': {", "\"X2 Transceiver 10Gbase-SR Te2/6\", \"name\": \"Transceiver Te2/6\", \"pid\": \"X2-10GB-SR\", \"sn\":", "inc CPU CPU utilization for five seconds: 13%/0%; one minute:", "Sep 10 2019 System image file is \"flash:c3750e-universalk9-mz.152-2.E8.bin\" Last reload", "}, 'main_mem': '2027520', 'mem_size': { 'non-volatile configuration': '255', }, 'number_of_intfs':", "10GE w/ CTS Rev. 1.5\", \"pid\": \"VS-SUP2T-10G\", \"vid\": \"V05\", \"sn\":", "}, \"1\": { \"rp\": { \"WS-SUP720-3BXL\": { \"name\": \"1\", \"descr\":", "3582279, \"pid\": 84, \"five_sec_cpu\": 0.55 }, 3: { \"invoked\": 116196,", ": CMMPP00DRB Hardware Board Revision Number : 0x05 Switch Ports", "= Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { \"active\":", "'CISCO3845': { 'descr': '3845 chassis', 'name': '3845 chassis', 'pid': 'CISCO3845',", "parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5) def test_golden_output_6(self): self.maxDiff = None", "15.6(3)M2, RELEASE SOFTWARE (fc2)\", \"uptime_in_curr_state\": \"1 day, 16 hours, 42", "= { \"main\": { \"chassis\": { \"WS-C6504-E\": { \"name\": \"WS-C6504-E\",", "800-33746-04 Top Assembly Revision Number : B0 Version ID :", "{ '0': { 'lc': { 'CISCO3845-MB': { 'descr': 'c3845 Motherboard", "bytes of ATA CompactFlash 3 (Read/Write) Configuration register is 0x0'''}", "interface daughtercard on Slot 1 SubSlot 1', 'pid': 
'EM-HDA-6FXO', 'sn':", "PID: C3900-SPE150/K9 , VID: V05 , SN: FOC16050QP6 NAME: \"Two-Port", "Policy Feature Card 4 EARL sub-module of 1\", DESCR: \"VS-F6K-PFC4", "golden_parsed_output_iosv = { 'main': { 'chassis': { 'IOSv': { 'descr':", "DESCR: \"X2 Transceiver 10Gbase-SR Te2/6\" PID: X2-10GB-SR , VID: V06", "3\", \"pid\": \"WS-F6K-DFC4-A\", \"sn\": \"SAL171848KL\", \"vid\": \"V04\", } } },", "}, \"2\": { \"lc\": { \"WS-X6816-10GE\": { \"name\": \"2\", \"descr\":", "is 0xF '''} golden_parsed_output_ios_1 = { 'version': {'version_short': '15.2', 'platform':", "Speed WAN Interface Card', 'name': 'Two-Port Fast Ethernet High Speed", "'1': { 'WIC-1B-U-V2': { 'descr': 'Wan Interface Card BRI U", "\"version\": { \"os\": \"IOS\", \"version_short\": \"12.2\", \"platform\": \"s72033_rp\", \"version\": \"12.2(18)SXF7\",", "SIMM with four DSPs on Slot 0 SubSlot 4\", DESCR:", "Address : 84:3D:C6:FF:F1:B8 Motherboard assembly number : 73-15476-04 Motherboard serial", "test_golden(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) platform_obj = ShowBootvar(device=self.dev_iosv)", "Supply 1\" PID: PWR-3900-AC , VID: V03 , SN: QCS1604P0BT", "class test_show_module(test_show_module_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowModule(device=self.dev1)", "= Mock(**self.golden_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) parsed_output = platform_obj.parse( status='active',", "2 PWR-2700-AC/4\": { \"name\": \"PS 2 PWR-2700-AC/4\", \"descr\": \"2700W AC", "\"descr\": \"X2 Transceiver 10Gbase-SR Te1/4\", \"name\": \"Transceiver Te1/4\", \"pid\": \"X2-10GB-SR\",", "\"FXS170802GL\", } } }, \"CLK-7600 2\": { \"other\": { \"CLK-7600", "{ '1': { 'C3KX-PWR-350WAC': { 'descr': 'ABC Power Supply', 'name':", "\"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702033D\", \"vid\": \"V06 \", } }, },", "is 10 weeks, 5 days, 5 hours, 15 minutes System", "= Mock(**self.semi_empty_output) dir_obj = 
Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError): parsed_output = dir_obj.parse()", "chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0', 'name': 'IOSv', 'pid':", "self.golden_parsed_output_4) def test_golden_output_5(self): self.maxDiff = None self.device = Mock(**self.golden_output_5) obj", "please contact us by sending email to <EMAIL>. Cisco IOSv", "dir_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) dir_obj", "SubSlot 1', 'pid': 'EM-HDA-6FXO', 'sn': 'FOC85389QXB', 'vid': 'V03 ', },", "Level: ipservices License Type: Permanent Next reload license Level: ipservices", "obj = ShowPlatformHardwareSerdesInternal(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe):", "DESCR: \"WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8\" PID: WS-F6K-PFC3BXL", "'C1010X-STACK', 'sn': 'FD232323XXZ', 'vid': 'V02 ', }, 'PWR-C2-2929WAC': { 'descr':", "\"0\": { \"WS-F6K-DFC4-A\": { \"descr\": \"WS-F6K-DFC4-A Distributed Forwarding Card 4", "sub-module of 2\", DESCR: \"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev.", ", VID: V06 , SN: ONT1702033D NAME: \"2\", DESCR: \"WS-X6816-10GE", "4, \"nonzero_cpu_processes\": [ \"PIM Process\", \"IOSv e1000\", \"OSPF-1 Hello\" ],", ", SN: FOC65798TG8 NAME: \"Six port FXO voice interface daughtercard", "{ 'descr': 'Two-Port Fast Ethernet High Speed WAN Interface Card',", "self.golden_parsed_output_asr1k) class test_show_platform_power(test_show_platform_power_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj =", ": FDO172217ED System serial number : FDO1633Q14S Top Assembly Part", "10:26:47 EST Mon Dec 9 2019 System restarted at 10:27:57", "'datak9', 'license_type': 'Permanent', 'next_reload_license_level': 'datak9', }, 'ipbase': { 'license_level': 
'ipbasek9',", "29-Mar-17 14:05 by prod_rel_team ROM: Bootstrap program is IOSv N95_1", "V05, SN: SAL11434P2C NAME: \"msfc sub-module of 1\", DESCR: \"WS-SUP720", "UTC Sat Aug 28 2010 (SP by power on) System", "= {'execute.return_value': ''} golden_parsed_output_iosv = { \"red_sys_info\": { \"last_switchover_reason\": \"unsupported\",", "V01 , SN: FOC98675U0D NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex", "FOC98675U0D NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1", "\"4\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te1/4\", \"name\":", "ShowPlatformHardware(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def test_golden_port(self):", "= Mock(**self.golden_output_subslot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(subslot='0/1') self.maxDiff =", "\"uptime_in_curr_state\": \"1 day, 16 hours, 42 minutes\", \"config_register\": \"0x0\", \"curr_sw_state\":", "2\", DESCR: \"OSR-7600 Clock FRU 2\" PID: CLK-7600 , VID:", "\"sn\": \"ONT170202T1\", \"vid\": \"V06 \", } }, \"5\": { \"X2-10GB-SR\":", "of 2\", DESCR: \"WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 1.1\"", "Inc. Compiled Wed 26-Jun-13 09:56 by prod_rel_team Image text-base: 0x00003000,", "\"main_mem\": \"983008\", \"processor_type\": \"R7000\", 'sp_by': 'power on', 'returned_to_rom_at': '21:57:23 UTC", ": F0 Version ID : V07 CLEI Code Number :", "524288K bytes of memory. Processor board ID FDO1633Q14S Last reset", "= obj.parse(slot='0', internal=True) self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_slot_internal) def", ", VID: V03, SN: APS1707008Y NAME: \"PS 2 PWR-2700-AC/4\", DESCR:", "'Wed 26-Jun-13 09:56', 'hostname': 'R5', 'image_id': 'C3750E-UNIVERSALK9-M', 'image_type': 'production image',", "Copyright (c) 1986-2006 by cisco Systems, Inc. 
Compiled Thu 23-Nov-06", "} }, \"five_sec_cpu_total\": 4, \"five_min_cpu\": 9, \"one_min_cpu\": 4, \"nonzero_cpu_processes\": [", "PWR-2700-AC/4 , VID: V03, SN: APS1707008Y NAME: \"PS 2 PWR-2700-AC/4\",", "= {'execute.return_value': ''' NAME: \"1\", DESCR: \"SM-ES2-16-P\" PID: SM-ES2-16-P ,", "= {'execute.return_value': ''' NAME: \"2821 chassis\", DESCR: \"2821 chassis\" PID:", "---------- * 1 30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M Configuration register is", "= inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_output_2(self): self.maxDiff = None self.device", "test_golden_active_ipm(self): self.maxDiff = None self.device = Mock(**self.golden_output_active_ipm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll(", "'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D', 'vid': 'V01 ', }, }, '1': {", "1', 'pid': 'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9', 'vid': 'V01D ', }, },", "}, 'os': 'IOS', 'platform': 'C3900', 'processor_board_flash': '2000880K', 'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at':", "\"147988420\", \"permissions\": \"-rw-\" } }, \"bytes_total\": \"2142715904\", \"bytes_free\": \"1989595136\" },", "test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError): parsed_output", "\"version\": \"15.6(3)M2\", \"rtr_type\": \"IOSv\", \"chassis_sn\": \"9K66Z7TOKAACDEQA24N7S\", \"chassis\": \"IOSv\", \"image_id\": \"VIOS-ADVENTERPRISEK9-M\",", "self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_1) version_obj", "'name': '3825 chassis', 'pid': 'CISCO3825', 'sn': 'FTX7908A3RQ', 'vid': 'V05 ',", "Private Network (VPN) Module DRAM configuration is 72 bits wide", "state = 1 day, 16 hours, 42 minutes Image Version", "'vid': 'V02L ', }, }, '1/0/49': { 'GLC-SX-MMD': { 'descr':", "Compiled Wed 29-Mar-17 14:05 by prod_rel_team ROM: Bootstrap 
program is", "governing import, export, transfer and use. Delivery of Cisco cryptographic", "obj.parse(port='0/0/0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_port) def test_golden_slot(self): self.device =", "'pid': 'WS-C3210X-48T-S', 'sn': 'FD5678Z90P', 'subslot': { '2': { 'C3KX-PWR-007CBA': {", "\"version_short\": \"15.6\", \"number_of_intfs\": { \"Gigabit Ethernet\": \"6\" }, \"version\": \"15.6(3)M2\",", "disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable = BOOTLDR variable = Configuration register is", "Engine 150 for Cisco 3900 ISR', 'name': 'Cisco Services Performance", "GE-DCARD-ESW , VID: V01 , SN: FOC91864MNN '''} golden_parsed_output_9 =", "to ROM by reload System image file is \"flash0:/vios-adventerprisek9-m\" Last", "\"IOSv\", \"chassis_sn\": \"9K66Z7TOKAACDEQA24N7S\", \"chassis\": \"IOSv\", \"image_id\": \"VIOS-ADVENTERPRISEK9-M\", 'compiled_by': 'prod_rel_team', 'compiled_date':", "\"descr\": \"VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0\", \"name\": \"VS-F6K-PFC4", "ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1) class test_show_processes_cpu(test_show_processes_cpu_iosxe): def test_golden(self):", "self.assertEqual(parsed_output, self.golden_parsed_output_standby) def test_golden_standby_offline(self): self.device = Mock(**self.golden_output_standby_offline) obj = ShowVersionRp(device=self.device)", "524288 Oct 17 2018 18:57:10 +00:00 nvram 269 -rw- 119", "NAME: \"Virtual Private Network (VPN) Module on Slot 0\", DESCR:", "\"vid\": \"\", \"sn\": \"FXS170802GL\", } } }, \"FAN-MOD-4HS 1\": {", "Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-3', 'sn': 'FOC758693YO',", "Card on Slot 0 SubSlot 3', 'pid': 'HWIC-2FE', 'sn': 'FOC16062824',", "24 port 1000mb SFP Rev. 
1.0\", \"pid\": \"WS-X6824-SFP\", \"vid\": \"V01\",", "RELEASE SOFTWARE (fc1)\", \"bootldr\": \"s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE", "\"WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2\" PID: WS-F6K-DFC4-E ,", "'System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)', 'rtr_type': 'CISCO3945-CHASSIS', 'system_image':", "'Clear/Subrate T3/E3 WAN on Slot 1', 'pid': 'NM-1T3/E3=', 'sn': 'FOC28476ADM',", "= Mock(**self.golden_output) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse(key_word='CPU', sort_time='5min') self.assertEqual(parsed_output,", "}, \"16\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/16\",", "= ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6) def test_golden_output_7(self): self.maxDiff", "{ \"descr\": \"X2 Transceiver 10Gbase-SR Te1/4\", \"name\": \"Transceiver Te1/4\", \"pid\":", "0) information is not available because it is in 'DISABLED'", "= Mock(**self.empty_output) redundancy_obj = ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = redundancy_obj.parse()", "with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(rp='standby', status='running') def test_empty(self): self.device1 =", "None self.device = Mock(**self.golden_output_1) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse(key_word='process')", "SAL11434LYG NAME: \"2\", DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet", "Mock(**self.empty_output) obj = ShowPlatformHardware(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class", "'V02 ', }, }, }, 'vid': 'V02 ', }, },", ", VID: V02, SN: SAL17152QB3 NAME: \"WS-F6K-DFC4-E Distributed Forwarding Card", "9% PID Runtime(ms) Invoked uSecs 5Sec 1Min 5Min TTY Process", "as test_show_version_rp_iosxe,\\ TestShowProcessesCpu as 
test_show_processes_cpu_iosxe,\\ TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\\ TestShowProcessesCpuPlatform as", "test_golden_asr1k(self): self.maxDiff = None self.dev_asr1k = Mock(**self.golden_output_asr1k) platform_obj = ShowPlatform(device=self.dev_asr1k)", "sub-module of 3\", DESCR: \"WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev.", "golden_parsed_output_ios = { 'version': {'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version", "}, } } }, }, } golden_output_2 = {'execute.return_value': '''", "------------------------------ Available system uptime = 0 minutes Switchovers system experienced", "{ \"name\": \"4\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet", "Technology Package License Information for Module:'c3900' ----------------------------------------------------------------- Technology Technology-package Technology-package", "self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output =", "Module on Slot 0', 'pid': 'AIM-VPN/SSL-3', 'sn': 'FOC758693YO', 'vid': 'V01',", "'AIM-VPN/SSL-2', 'sn': 'FOC2837465E', 'vid': 'V01', 'subslot': { '0': { 'VWIC2-2MFT-T1/E1':", "0, \"one_min_cpu\": 0.54, \"process\": \"PIM Process\", \"five_min_cpu\": 0.48, \"runtime\": 362874,", "self.golden_parsed_output) def test_empty(self): self.device1 = Mock(**self.empty_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1) with", "5 days, 5 hours, 16 minutes Time since cat6k_tb1 switched", "self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) version_obj = ShowVersion(device=self.dev_iosv) parsed_output", "\"sort\": { 1: { \"invoked\": 3321960, \"usecs\": 109, \"tty\": 0,", "Chassis System\" PID: WS-C6504-E , VID: V01, SN: FXS1712Q1R8 NAME:", "1\": { \"other\": { \"FAN-MOD-4HS 1\": { \"name\": \"FAN-MOD-4HS 1\",", "Motherboard assembly number : 73-13061-04 Motherboard serial number : 
FDO1633Q14M", "Rev. 1.0\", \"pid\": \"WS-X6824-SFP\", \"vid\": \"V01\", \"sn\": \"SAL17152EG9\", \"subslot\": {", "Hw Revision: 1.0\" PID: IOSv , VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S", "------ ----- ----- ---------- ---------- * 1 30 WS-C3750X-24S 15.2(2)E8", "'vid': 'V01 ', }, }, '1': { 'WIC-1B-U-V2': { 'descr':", "'V02 ', }, }, }, }, } golden_output_5 = {'execute.return_value':", "'name': 'PVDMII DSP SIMM with four DSPs on Slot 0", "C3750E Boot Loader (C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE SOFTWARE (fc1) R5", "self.dev1 = Mock(**self.empty_output) platform_obj = ShowModule(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "1\", DESCR: \"Clear/Subrate T3/E3 WAN\" PID: NM-1T3/E3= , VID: V01", "'FOC98675W3E', 'vid': 'V01 ', }, }, }, }, }, },", "9 2019 System restarted at 10:27:57 EST Mon Dec 9", "{'execute.return_value': ''} golden_parsed_output_iosv = { \"active\": { \"boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\", \"configuration_register\":", "to import, export, distribute or use encryption. Importers, exporters, distributors", "\"sn\": \"FXS170802GL\", } } }, \"FAN-MOD-4HS 1\": { \"other\": {", "}, 'curr_config_register': '0xF' } } device_output = {'execute.return_value':''' best-c3945-IOS3#show version", "'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57 EST Mon Dec 9 2019', 'uptime': '1", "0.55 }, 3: { \"invoked\": 116196, \"usecs\": 976, \"tty\": 0,", "2 hours, 3 minutes', 'version': '12.2(55)SE8', 'version_short': '12.2' } }", "memory. 
Processor board ID FGL161010K8 2 FastEthernet interfaces 3 Gigabit", "self.maxDiff = None self.device = Mock(**self.golden_output_2) obj = ShowInventory(device=self.device) parsed_output", "'slot': { '0': { 'other': { 'AIM-VPN/SSL-2': { 'descr': 'Encryption", "3\", DESCR: \"Two-Port Fast Ethernet High Speed WAN Interface Card\"", "def test_golden_ios(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios) version_obj =", "Device(name='c3850') empty_output = {'execute.return_value': ''} golden_parsed_output = { \"five_sec_cpu_total\": 13,", "= platform_obj.parse( status='active', slot='0', iotype='ipm') self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_golden_active_opm(self): self.maxDiff", "to comply with applicable laws and regulations. If you are", "NM on Slot 2 SubSlot 0\", DESCR: \"Gigabit(1000BaseT) module for", "1\", DESCR: \"VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0\" PID:", "Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2011 by Cisco Systems, Inc.", "http://www.cisco.com/techsupport Copyright (c) 1986-2018 by Cisco Systems, Inc. 
Compiled Mon", "'name': 'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD', 'sn': 'ACW102938VS', 'vid': 'V01 ', },", "def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardware(device=self.device1) with self.assertRaises(SchemaEmptyParserError):", "{ 'other': { 'AIM-VPN/SSL-2': { 'descr': 'Encryption AIM Element', 'name':", "\"vid\": \"V06 \", } }, }, } } }, \"3\":", "FXO voice interface daughtercard\" PID: EM-HDA-6FXO , VID: V03 ,", "T1/E1 on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F',", "serial number : FDO1633Q14M Model revision number : A0 Motherboard", "self.assertEqual(parsed_output, self.golden_parsed_output_asr1k) class test_show_platform_power(test_show_platform_power_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj", "semi_empty_output = {'execute.return_value': '''\\ Directory of flash:/ '''} golden_parsed_output_iosv =", "NAME: \"switching engine sub-module of 2\", DESCR: \"WS-F6700-DFC3CXL Distributed Forwarding", "= ShowVersion(device=self.dev1) with self.assertRaises(KeyError): parsed_output = version_obj.parse() def test_golden_iosv(self): self.maxDiff", "Ethernet on Slot 0', 'pid': 'CISCO3845-MB', 'sn': 'FOC729346GQ', 'vid': 'V09", "obj.parse(rp='active', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_golden_standby(self): self.device", "1.2, 512KB L2 Cache Last reset from s/w reset SuperLAT", "inventory NAME: \"WS-C6503-E\", DESCR: \"Cisco Systems Catalyst 6500 3-slot Chassis", "= Mock(**self.empty_output) platform_obj = ShowBootvar(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse()", "NAME: \"Transceiver Te1/4\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/4\" PID: X2-10GB-SR", "Gigabit Ethernet interfaces 2 Ten Gigabit Ethernet interfaces The password-recovery", "0 Last switchover reason = unsupported Hardware Mode = Simplex", "= 
Mock(**self.golden_output_c3850) platform_obj = ShowSwitch(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class", "'''} golden_parsed_output_4 = { 'slot': { '1': { 'rp': {", "ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\\ TestShowPlatformPower as test_show_platform_power_iosxe,\\", "Image ------ ----- ----- ---------- ---------- * 1 30 WS-C3750X-24S", "14:05', \"processor_type\": \"revision 1.0\", \"platform\": \"IOSv\", \"image_type\": \"production image\", 'processor_board_flash':", "V05 , SN: FGL161010K8 NAME: \"Cisco Services Performance Engine 150", "ShowPlatformHardware(device=self.device) parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def", "with self.assertRaises(AttributeError): parsered_output = version_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output)", ", SN: FOC91864MNN '''} golden_parsed_output_9 = { 'main': { 'chassis':", "\"name\": \"switching engine sub-module of 2\", \"pid\": \"WS-F6700-DFC3CXL\", \"sn\": \"SAL1214LAG5\",", "'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57 EST Mon Dec 9 2019', 'uptime':", "Next reboot ------------------------------------------------------------------ ipbase ipbasek9 Permanent ipbasek9 security securityk9 Permanent", "WS-C1010XR-48FPS-I, VID: V05 , SN: FD2043B0K3 NAME: \"Switch 1 -", "IOSv (revision 1.0) with with 435457K/87040K bytes of memory. 
Processor", "V03 CLEI Code Number : CMMFF00ARC Hardware Board Revision Number", "drw- 0 Oct 14 2013 00:00:00 +00:00 config 267 -rw-", "platform_obj = ShowSwitchDetail(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) if __name__ ==", "1 PWR-1400-AC\", \"descr\": \"AC power supply, 1400 watt 1\", \"pid\":", "362874 3321960 109 1.03% 0.54% 0.48% 0 PIM Process 84", "\"1\": { \"rp\": { \"VS-SUP2T-10G\": { \"name\": \"1\", \"descr\": \"VS-SUP2T-10G", "Distributed Forwarding Card 4 Rev. 1.2\" PID: WS-F6K-DFC4-E , VID:", "Image Version = Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version", ", SN: FXS181101V4 NAME: \"CLK-7600 2\", DESCR: \"OSR-7600 Clock FRU", "sub-module of 1\", \"pid\": \"WS-F6K-PFC3BXL\", \"sn\": \"SAL11434LYG\", \"vid\": \"V01\", },", "}, \"6\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/6\",", "http://www.cisco.com/techsupport Copyright (c) 1986-2006 by cisco Systems, Inc. 
Compiled Thu", "\"subslot\": { \"0\": { \"WS-F6700-CFC\": { \"descr\": \"WS-F6700-CFC Centralized Forwarding", "0.54, \"process\": \"PIM Process\", \"five_min_cpu\": 0.48, \"runtime\": 362874, \"pid\": 368,", "Mock(**self.golden_output_serdes) obj = ShowPlatformHardwareSerdes(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None", "'version': { 'chassis': 'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Fri", "18:57:18 +00:00\", \"index\": \"269\", \"size\": \"119\", \"permissions\": \"-rw-\" }, \"config\":", "serial number : FDO202823P8 System serial number : FDO2028F1WK Top", ", SN: ONT17020338 NAME: \"Transceiver Te2/2\", DESCR: \"X2 Transceiver 10Gbase-SR", "'DTN1504L0E9', 'vid': 'V01D ', }, }, '1/1/1': { 'SFP-10G-SR': {", "\"12.2\", \"platform\": \"s72033_rp\", \"version\": \"12.2(18)SXF7\", \"image_id\": \"s72033_rp-ADVENTERPRISEK9_WAN-M\", 'compiled_by': 'kellythw', 'compiled_date':", "'''\\ Directory of flash:/ '''} golden_parsed_output_iosv = { \"dir\": {", "= Mock(**self.golden_output_2) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2)", "\"Clear/Subrate T3/E3 WAN on Slot 1\", DESCR: \"Clear/Subrate T3/E3 WAN\"", "= None self.dev = Mock(**self.golden_output_1) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output =", "DESCR: \"c3845 Motherboard with Gigabit Ethernet\" PID: CISCO3845-MB , VID:", "self.assertEqual(parsed_output, self.golden_parsed_output_5) def test_golden_output_6(self): self.maxDiff = None self.device = Mock(**self.golden_output_6)", "C3900 Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE SOFTWARE (fc2) Technical Support:", "\"descr\": \"OSR-7600 Clock FRU 2\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\":", "Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)\", \"bootldr\": \"s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version", "by prod_rel_team ROM: System Bootstrap, 
Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)", "obj = ShowPlatformHardware(device=self.device) parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output,", "\"name\": \"PS 1 PWR-1400-AC\", \"descr\": \"AC power supply, 1400 watt", "\"\", }, \"WS-F6K-PFC3BXL\": { \"descr\": \"WS-F6K-PFC3BXL Policy Feature Card 3", "def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError):", "\"5\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/5\", \"name\":", "'vid': 'V05 ', }, }, }, 'C3900 AC Power Supply", "module for EtherSwitch NM on Slot 2 SubSlot 0\", DESCR:", "= Mock(**self.golden_output_iosv) inventory_obj = ShowInventory(device=self.dev_iosv) parsed_output = inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv)", "} } } golden_output_iosv = {'execute.return_value': '''\\ Redundant System Information", "(fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2006 by cisco Systems,", "test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitchDetail(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output", "test_show_processes_cpu_history_iosxe,\\ TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\\ TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\\", "84 3582279 1466728 2442 0.55% 0.87% 2.77% 0 IOSv e1000", "\"WS-F6700-DFC3CXL\", \"sn\": \"SAL1214LAG5\", \"vid\": \"V01\", } } }, } }", "self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_1) version_obj = ShowVersion(device=self.dev_iosv) parsed_output", "\"name\": \"2\", \"descr\": \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev.", "ShowPlatform,\\ 
ShowPlatformPower,\\ ShowProcessesCpuHistory,\\ ShowProcessesCpuPlatform,\\ ShowPlatformSoftwareStatusControl,\\ ShowPlatformSoftwareSlotActiveMonitorMem,\\ ShowPlatformHardware,\\ ShowPlatformHardwarePlim,\\ ShowPlatformHardwareQfpBqsOpmMapping,\\ ShowPlatformHardwareQfpBqsIpmMapping,\\", "software, Version 3.0.0. Bridging software. TN3270 Emulation software. 1 Virtual", "1.8\" PID: WS-F6K-PFC3BXL , VID: V01, SN: SAL11434LYG NAME: \"2\",", ", VID: V05 , SN: FTX7908A3RQ NAME: \"VWIC2-2MFT-T1/E1 - 2-Port", "unittest from unittest.mock import Mock from pyats.topology import Device from", "boot 264 drw- 0 Oct 14 2013 00:00:00 +00:00 config", "\"Six port FXO voice interface daughtercard on Slot 1 SubSlot", "of non-volatile configuration memory. 2097152K bytes of ATA System CompactFlash", "2 PWR-2700-AC/4\", \"descr\": \"2700W AC power supply for CISCO7604 2\",", "SAL17163901 NAME: \"Transceiver Te1/4\", DESCR: \"X2 Transceiver 10Gbase-SR Te1/4\" PID:", "platform_obj = ShowPlatformPower(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe):", "V01L , SN: LTP13579L3R NAME: \"TenGigabitEthernet2/1/1\", DESCR: \"SFP-10GBase-LR\" PID: SFP-10G-LR", "Time since cat6k_tb1 switched to active is 10 weeks, 5", "on Slot 0\", DESCR: \"c3845 Motherboard with Gigabit Ethernet\" PID:", "= cpu_platform_obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_empty(self): self.device1", "Mock(**self.empty_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def", "\"sn\": \"SAL17152N0F\", \"subslot\": { \"0\": { \"VS-F6K-MSFC5\": { \"descr\": \"VS-F6K-MSFC5", "variable = BOOTLDR variable = Configuration register is 0x2012 Standby", "five minutes: 15% '''} golden_parsed_output_1 = { \"sort\": { 1:", 
"self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): def test_golden_serdes(self): self.device", "Number : 800-38990-01 Top Assembly Revision Number : F0 Version", "} } }, \"3\": { \"lc\": { \"WS-X6824-SFP\": { \"name\":", "'Encryption AIM Element', 'name': 'Virtual Private Network (VPN) Module on", "\"CLK-7600 1\", DESCR: \"OSR-7600 Clock FRU 1\" PID: CLK-7600 ,", "CompactFlash 2 (Read/Write) 10080K bytes of ATA CompactFlash 3 (Read/Write)", "register is 0x0'''} golden_parsed_output_ios = { 'version': {'bootldr': 'C3750E Boot", "(fc1)\", \"hostname\": \"cat6k_tb1\", \"uptime\": \"10 weeks, 5 days, 5 hours,", "{ 'chassis': { 'CISCO3845': { 'descr': '3845 chassis', 'name': '3845", "\"Cisco Systems Cisco 6500 4-slot Chassis System\" PID: WS-C6504-E ,", "GLC-SX-MMD , VID: V01 , SN: ACW102938VS '''} golden_parsed_output_4 =", "\"4\", DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 
3.4\"", "device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') def", "self.device1 = Mock(**self.empty_output) obj = ShowProcessesCpu(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "ShowPlatformHardwareQfpStatisticsDrop( device=self.device) parsed_output = platform_obj.parse(status='active') self.assertEqual(parsed_output, self.golden_parsed_output_active) class test_show_env(test_show_env_iosxe): def", "= None self.assertEqual(parsed_output, self.golden_parsed_output_subslot) def test_golden_slot_internal(self): self.device = Mock(**self.golden_output_slot_internal) obj", "{ \"other\": { \"FAN-MOD-4HS 1\": { \"name\": \"FAN-MOD-4HS 1\", \"descr\":", "SchemaMissingKeyError from genie.libs.parser.ios.show_platform import ShowVersion,\\ Dir,\\ ShowRedundancy,\\ ShowInventory,\\ ShowBootvar, \\", "on Slot 0\", DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-2 ,", "= Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { 'main':", "2.77% 0 IOSv e1000 412 113457 116196 976 0.15% 0.07%", "= Mock(**self.empty_output) obj = ShowPlatformHardwarePlim(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(port='0/0/0')", "0\", DESCR: \"Encryption AIM Element\" PID: AIM-VPN/SSL-2 , VID: V01,", "(s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) cat6k_tb1 uptime is 10", "Software, C3900 Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE SOFTWARE (fc2) Technical", "'license_type': 'Permanent', 'next_reload_license_level': 'datak9', }, 'ipbase': { 'license_level': 'ipbasek9', 'license_type':", "sub-module of 4\", \"pid\": \"WS-F6700-CFC\", \"sn\": \"SAL13516QS8\", \"vid\": \"V06\", }", "SN: SAL11434P2C NAME: \"msfc sub-module of 1\", DESCR: \"WS-SUP720 MSFC3", "chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0\" PID: IOSv 
,", "4%; five minutes: 9% PID Runtime(ms) Invoked uSecs 5Sec 1Min", "= Mock(**self.golden_output_serdes_internal) obj = ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff =", "} } }, \"4\": { \"lc\": { \"WS-X6748-GE-TX\": { \"name\":", "1', 'pid': 'PWR-C2-2929WAC', 'sn': 'LIT03728KKK', 'vid': 'V02L ', }, },", "self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj", "PID: WS-C6503-E-FAN , VID: V02, SN: DCH183500KW NAME: \"PS 1", "self.golden_parsed_output_iosv) def test_golden_ios(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios) version_obj", "Power Supply 1', 'name': 'C3900 AC Power Supply 1', 'pid':", "PID: AIM-VPN/SSL-3 , VID: V01, SN: FOC758693YO NAME: \"Clear/Subrate T3/E3", ", VID: V06 , SN: ONT170202T1 NAME: \"Transceiver Te1/5\", DESCR:", "bytes of ATA CompactFlash 1 (Read/Write) 0K bytes of ATA", "\"descr\": \"VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS", "Te2/2\", \"name\": \"Transceiver Te2/2\", \"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702020H\", \"vid\": \"V06", "Mock(**self.golden_output) platform_obj = ShowPlatformPower(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class", "0 minutes Switchovers system experienced = 0 Standby failures =", "{ \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te1/5\", \"name\": \"Transceiver", "'descr': 'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR', 'sn': 'SPC1519005V', 'vid': 'V03", "', }, }, }, }, } golden_output_6 = {'execute.return_value': '''", "ShowProcessesCpuPlatform,\\ ShowPlatformSoftwareStatusControl,\\ ShowPlatformSoftwareSlotActiveMonitorMem,\\ ShowPlatformHardware,\\ ShowPlatformHardwarePlim,\\ ShowPlatformHardwareQfpBqsOpmMapping,\\ 
ShowPlatformHardwareQfpBqsIpmMapping,\\ ShowPlatformHardwareSerdes,\\ ShowPlatformHardwareSerdesInternal,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\", "'Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot 0',", "def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowVersionRp(device=self.device) parsed_output =", "\"lc\": { \"WS-X6816-10GE\": { \"name\": \"2\", \"descr\": \"WS-X6816-10GE CEF720 16", "NM-16ESW , VID: V01 , SN: FOC135464KO NAME: \"Gigabit(1000BaseT) module", "Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE SOFTWARE", "'C3750E Boot Loader (C3750X-HBOOT-M) Version ' '15.2(3r)E, RELEASE SOFTWARE (fc1)',", "}, }, }, }, }, }, } golden_output_8 = {'execute.return_value':", "Boot Loader (C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE SOFTWARE (fc1) R5 uptime", "\"TenGigabitEthernet2/1/1\", DESCR: \"SFP-10GBase-LR\" PID: SFP-10G-LR , VID: V02 , SN:", "test_golden_ios(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios) version_obj = ShowVersion(device=self.dev_iosv)", "'pid': 'CISCO2821', 'sn': 'FTX1234AMWT', 'vid': 'V07 ', }, }, },", "License Type: Permanent Next reload license Level: ipservices cisco WS-C3750X-24S", "minutes\", \"communications\": \"Down\", \"hw_mode\": \"Simplex\", \"communications_reason\": \"Failure\", \"standby_failures\": \"0\" },", "0 } golden_output_1 = {'execute.return_value': ''' CPU utilization for five", "'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'next_reload_license_level': 'ipservices', 'chassis': 'WS-C3750X-24S', 'main_mem':", "ShowPlatformHardwareQfpStatisticsDrop,\\ ShowEnvironment,\\ ShowModule,\\ ShowSwitch, ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as", "slot='0') class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): def test_golden_serdes(self): self.device = 
Mock(**self.golden_output_serdes) obj =", "LTP13579L3R NAME: \"TenGigabitEthernet2/1/1\", DESCR: \"SFP-10GBase-LR\" PID: SFP-10G-LR , VID: V02", "'AIM-VPN/SSL-3': { 'descr': 'Encryption AIM Element', 'name': 'Virtual Private Network", "Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)\", \"bootldr\": \"s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M),", "2019 System image file is \"flash0:c3900-universalk9-mz.SPA.150-1.M7.bin\" Last reload type: Normal", "board ID FGL161010K8 2 FastEthernet interfaces 3 Gigabit Ethernet interfaces", "is enabled. 512K bytes of flash-simulated non-volatile configuration memory. Base", "SN: ONT1702033D NAME: \"2\", DESCR: \"WS-X6816-10GE CEF720 16 port 10GE", "NAME: \"VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on", "14:05 by prod_rel_team Configuration register = 0x0 Peer (slot: 0)", "minutes\", \"config_register\": \"0x0\", \"curr_sw_state\": \"ACTIVE\" } } } golden_output_iosv =", "FlexStackPlus Module', 'pid': 'C1010X-STACK', 'sn': 'FD232323XXZ', 'vid': 'V02 ', },", "contact us by sending email to <EMAIL>. 
Cisco IOSv (revision", "SN: FXS1821Q2H9 NAME: \"CLK-7600 1\", DESCR: \"OSR-7600 Clock FRU 1\"", "Power Supply\" PID: C3KX-PWR-350WAC , VID: V01D , SN: DTN1504L0E9", "Chassis System\", \"pid\": \"WS-C6504-E\", \"vid\": \"V01\", \"sn\": \"FXS1712Q1R8\", } }", "\"Switch 1 - Power Supply 1\", DESCR: \"ABC Power Supply\"", "'vid': 'V04 ', }, }, }, }, } golden_output_9 =", "Te1/5\" PID: X2-10GB-SR , VID: V06 , SN: ONT1702033D NAME:", "\"16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2\", DESCR: \"16 Port", "obj = ShowPlatformHardwareSerdes(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output,", "best-c3945-IOS3#show version Cisco IOS Software, C3900 Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7,", "test_golden_port(self): self.device = Mock(**self.golden_output_port) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(port='0/0/0')", "'1.0', }, }, }, } golden_output_iosv = {'execute.return_value': '''\\ NAME:", "golden_parsed_output_ios_1 = { 'version': {'version_short': '15.2', 'platform': 'C3750E', 'version': '15.2(2)E8',", "Systems, Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team Configuration register", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def test_empty(self): self.dev =", "return this product immediately. A summary of U.S. 
laws governing", "show bootvar '''} def test_empty(self): self.dev = Mock(**self.empty_output) platform_obj =", "'PowerPC405', 'rtr_type': 'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S', 'number_of_intfs': { 'Virtual Ethernet': '14',", "'AIM-VPN/SSL-3', 'sn': 'FOC758693YO', 'vid': 'V01', }, }, }, '1': {", "self.dev1 = Mock(**self.empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsered_output =", "} }, } } }, }, } golden_output_2 = {'execute.return_value':", "\\ ShowProcessesCpuSorted,\\ ShowProcessesCpu,\\ ShowVersionRp,\\ ShowPlatform,\\ ShowPlatformPower,\\ ShowProcessesCpuHistory,\\ ShowProcessesCpuPlatform,\\ ShowPlatformSoftwareStatusControl,\\ ShowPlatformSoftwareSlotActiveMonitorMem,\\", "by kellythw Image text-base: 0x40101040, data-base: 0x42D98000 ROM: System Bootstrap,", "\"pid\": \"X2-10GB-SR\", \"sn\": \"ONT1702020H\", \"vid\": \"V06 \", } }, \"3\":", "dir_obj = Dir(device=self.dev_iosv) parsed_output = dir_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase):", "VID: V02, SN: SAL1128UPQ9 NAME: \"switching engine sub-module of 2\",", "by Cisco Systems, Inc. 
Compiled Wed 26-Jun-13 09:56 by prod_rel_team", "ipbasek9 security securityk9 Permanent securityk9 uc None None None data", "None self.device = Mock(**self.golden_output_3) obj = ShowInventory(device=self.device) parsed_output = obj.parse()", "SN: DTN1504L0E9 NAME: \"TenGigabitEthernet1/1/1\", DESCR: \"SFP-10GBase-SR\" PID: SFP-10G-SR , VID:", "29 2017 00:00:00 +00:00 vios-adventerprisek9-m 268 -rw- 524288 Oct 17", "RELEASE SOFTWARE (fc1)\", \"hostname\": \"cat6k_tb1\", \"uptime\": \"10 weeks, 5 days,", "self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1)", "System returned to ROM by reload at 10:26:47 EST Mon", "0/0/0') def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj", "\"SAL17163901\", \"vid\": \"V03\", }, }, \"4\": { \"X2-10GB-SR\": { \"descr\":", "parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self): self.maxDiff = None", "'hostname': 'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'Reload Command',", "def test_golden_ios_cat6k(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_cat6k) version_obj =", "def test_semi_empty(self): self.dev2 = Mock(**self.semi_empty_output) platform_obj = ShowPlatform(device=self.dev2) with self.assertRaises(SchemaEmptyParserError):", "BRI U (2091, 3086)', 'name': 'Wan Interface Card BRI U", "RELEASE SOFTWARE (fc2)\", \"uptime_in_curr_state\": \"1 day, 16 hours, 42 minutes\",", "= obj.parse(key_word='CPU', sort_time='5min') self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None", "self.assertEqual(parsed_output, self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase): dev = Device(name='ios') dev_iosv = Device(name='iosv')", "'262144', 'mem_size': {'flash-simulated 
non-volatile configuration': '512'}, 'next_reload_license_level': 'ipservices', 'number_of_intfs': {'Gigabit", "\"vid\": \"V03\", }, }, \"4\": { \"X2-10GB-SR\": { \"descr\": \"X2", "- Power Supply 1\", DESCR: \"LLL Power Supply\" PID: PWR-C2-2929WAC", "obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7) def test_golden_output_8(self):", "NAME: \"3845 chassis\", DESCR: \"3845 chassis\" PID: CISCO3845 , VID:", "\"config_register\": \"0x0\", \"curr_sw_state\": \"ACTIVE\" } } } golden_output_iosv = {'execute.return_value':", ", VID: V02, SN: SAL1128UPQ9 NAME: \"switching engine sub-module of", "governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If", "self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output", ", SN: FOC98675W3E NAME: \"Virtual Private Network (VPN) Module on", "on Slot 1 SubSlot 1\", DESCR: \"Six port FXO voice", "self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def test_golden_active_ipm(self): self.device", "software. TN3270 Emulation software. 1 Virtual Ethernet/IEEE 802.3 interface 50", "} } }, \"PS 1 PWR-1400-AC\": { \"other\": { \"PS", "is 0x2102 '''} golden_output_ios_1 = {'execute.return_value': '''\\ Cisco IOS Software,", "= None self.device = Mock(**self.golden_output_8) obj = ShowInventory(device=self.device) parsed_output =", "U (2091, 3086)', 'name': 'Wan Interface Card BRI U (2091,", "Process\", \"IOSv e1000\", \"OSPF-1 Hello\" ], \"five_sec_cpu_interrupts\": 0 } golden_output_1", "test_golden_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1) obj = ShowProcessesCpu(device=self.device)", "256K bytes of non-volatile configuration memory. 
2097152K bytes of ATA", "'compiled_date': 'Mon 22-Jan-18 04:07', 'compiled_by': 'prod_rel_team', 'rom': 'Bootstrap program is", "{'execute.return_value': ''' best-c3945-IOS3#show inventory NAME: \"CISCO3945-CHASSIS\", DESCR: \"CISCO3945-CHASSIS\" PID: CISCO3945-CHASSIS", "Card BRI U (2091, 3086)\" PID: WIC-1B-U-V2 , VID: V01,", "assistance please contact us by sending email to <EMAIL>. cisco", "10 2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent',", "'slot': { '1': { 'lc': { 'SM-ES2-16-P': { 'descr': 'SM-ES2-16-P',", "= obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_4) def test_golden_output_5(self): self.maxDiff = None self.device", "None self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj =", "States and local country laws governing import, export, transfer and", "test_show_platform_hardware_serdes_statistics_iosxe,\\ TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\\ ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\\", "platform_obj.parse() def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj", "platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0')", "3086)', 'name': 'Wan Interface Card BRI U (2091, 3086) on", "}, } } }, \"4\": { \"lc\": { \"WS-X6748-GE-TX\": {", "with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform(test_show_platform_iosxe): def test_empty(self): self.dev1", "not imply third-party authority to import, export, 
distribute or use", "sub-module of 1\", \"pid\": \"VS-F6K-MSFC5\", \"sn\": \"SAL17142D06\", \"vid\": \"\", },", "watt 1\", \"pid\": \"PWR-1400-AC\", \"vid\": \"V01\", \"sn\": \"ABC0830J127\", } }", "ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios) def test_golden_ios_cat6k(self): self.maxDiff =", "''' show version Cisco Internetwork Operating System Software IOS (tm)", "+00:00 e1000_bia.txt 2142715904 bytes total (1989595136 bytes free) '''} def", "'1/1/1': { 'SFP-10G-SR': { 'descr': 'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR',", "IOSv e1000 412 113457 116196 976 0.15% 0.07% 0.07% 0", "PWR-2700-AC/4\", \"descr\": \"2700W AC power supply for CISCO7604 2\", \"pid\":", "= obj.parse() class test_show_platform(test_show_platform_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj", "reboot ------------------------------------------------------------------ ipbase ipbasek9 Permanent ipbasek9 security securityk9 Permanent securityk9", "= Mock(**self.golden_output_serdes) obj = ShowPlatformHardwareSerdes(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff =", "\"Virtual Private Network (VPN) Module on Slot 0\", DESCR: \"Encryption", "15.0(1r)M13, RELEASE SOFTWARE (fc1)', 'rtr_type': 'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57", "\"invoked\": 116196, \"usecs\": 976, \"tty\": 0, \"one_min_cpu\": 0.07, \"process\": \"OSPF-1", "= redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase): dev1 = Device(name='empty') dev_iosv", "} }, } } }, \"4\": { \"lc\": { \"WS-X6748-GE-TX\":", "}, 'slot': { '0': { 'other': { 'AIM-VPN/SSL-2': { 'descr':", "{ '1': { 'lc': { 'SM-ES2-16-P': { 'descr': 'SM-ES2-16-P', 'name':", "Mock(**self.empty_output) obj = 
ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0')", "security securityk9 Permanent securityk9 uc None None None data datak9", "Systems Catalyst 6500 3-slot Chassis System\" PID: WS-C6503-E , VID:", "ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase): dev =", "''} golden_parsed_output_iosv = { \"active\": { \"boot_variable\": \"disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12\", \"configuration_register\": \"0x2012\"", "ShowProcessesCpuHistory,\\ ShowProcessesCpuPlatform,\\ ShowPlatformSoftwareStatusControl,\\ ShowPlatformSoftwareSlotActiveMonitorMem,\\ ShowPlatformHardware,\\ ShowPlatformHardwarePlim,\\ ShowPlatformHardwareQfpBqsOpmMapping,\\ ShowPlatformHardwareQfpBqsIpmMapping,\\ ShowPlatformHardwareSerdes,\\ ShowPlatformHardwareSerdesInternal,\\", ", VID: , SN: FXS170802GL NAME: \"CLK-7600 2\", DESCR: \"OSR-7600", "= dir_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) dir_obj = Dir(device=self.dev1)", "'EM-HDA-6FXO', 'sn': 'FOC85389QXB', 'vid': 'V03 ', }, }, }, 'vid':", "PDT Mon Sep 10 2018', 'uptime': '9 weeks, 4 days,", "class test_show_env(test_show_env_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowEnvironment(device=self.dev)", "Type: Permanent Next reload license Level: ipservices cisco WS-C3750X-24S (PowerPC405)", "SW Image ------ ----- ----- ---------- ---------- * 1 30", "9 2019', 'returned_to_rom_by': 'reload', 'rom': 'System Bootstrap, Version 15.0(1r)M13, RELEASE", "'uptime': '8 weeks, 3 days, 10 hours, 27 minutes', 'returned_to_rom_by':", "SAL171848KL NAME: \"4\", DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet", "= Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv", 
"Performance Engine 150 for Cisco 3900 ISR\" PID: C3900-SPE150/K9 ,", "DTN1504L0E9 NAME: \"TenGigabitEthernet1/1/1\", DESCR: \"SFP-10GBase-SR\" PID: SFP-10G-SR , VID: V03", "as test_show_platform_hardware_iosxe,\\ TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\\ TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\ TestShowPlatformHardwareQfpBqsIpmMapping as", "4 days, 2 hours, 3 minutes System returned to ROM", "\"s/w reset\", 'processor_board_flash': '65536K', \"number_of_intfs\": { \"Gigabit Ethernet/IEEE 802.3\": \"50\",", "of ATA CompactFlash 3 (Read/Write) Configuration register is 0x0'''} golden_parsed_output_ios", "Ethernet Rev. 2.6\" PID: WS-X6748-GE-TX , VID: V02, SN: SAL1128UPQ9", "PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675W3E NAME: \"Virtual", "= ShowModule(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch(test_show_switch_iosxe): def test_empty(self):", "test_show_platform_iosxe,\\ TestShowPlatformPower as test_show_platform_power_iosxe,\\ TestShowVersionRp as test_show_version_rp_iosxe,\\ TestShowProcessesCpu as test_show_processes_cpu_iosxe,\\", ", VID: V01, SN: FOC2837465E '''} golden_parsed_output_7 = { 'main':", "Ethernet/IEEE 802.3': '1' }, \"mem_size\": {\"non-volatile configuration\": \"1917\", \"packet buffer\":", "16 hours, 42 minutes\" } } golden_output_iosv = {'execute.return_value': '''\\", "Technology Corp). X.25 software, Version 3.0.0. Bridging software. 
TN3270 Emulation", "VID: V06 , SN: ONT1702020H NAME: \"Transceiver Te2/3\", DESCR: \"X2", "27 minutes System returned to ROM by power-on System restarted", "\"image_id\": \"s72033_rp-ADVENTERPRISEK9_WAN-M\", 'compiled_by': 'kellythw', 'compiled_date': 'Thu 23-Nov-06 06:26', \"image_type\": \"production", "'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E', 'vid': 'V01 ', }, }, }, },", ": A0 Model number : WS-C3750X-24S-E Daughterboard assembly number :", "test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output", "VID: V00 , SN: FDO123R12W NAME: \"Switch 1 - Power", "\"switching engine sub-module of 1\", DESCR: \"WS-F6K-PFC3BXL Policy Feature Card", "\"WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8\" PID: WS-F6K-PFC3BXL ,", "'sn': 'FOC135464KO', 'subslot': { '0': { 'GE-DCARD-ESW': { 'descr': 'Gigabit(1000BaseT)", "Corp). X.25 software, Version 3.0.0. Bridging software. TN3270 Emulation software.", "Motherboard revision number : A0 Model number : WS-C3750X-24S-E Daughterboard", "\"PIM Process\", \"IOSv e1000\", \"OSPF-1 Hello\" ], \"five_sec_cpu_interrupts\": 0 }", "1', 'pid': 'NM-1T3/E3=', 'sn': 'FOC28476ADM', 'vid': 'V01 ', }, },", "High Speed WAN Interface Card', 'name': 'Two-Port Fast Ethernet High", "CISCO7604 1\" PID: PWR-2700-AC/4 , VID: V03, SN: APS1707008Y NAME:", "'uptime': '9 weeks, 4 days, 2 hours, 3 minutes', 'version':", "(c) 1986-2006 by cisco Systems, Inc. Compiled Thu 23-Nov-06 06:26", "4 Rev. 
1.2\" PID: WS-F6K-DFC4-E , VID: V02, SN: SAL171846RF", "test_golden(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitchDetail(device=self.dev_c3850)", "self.golden_parsed_output_active_opm) class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj =", "\"WS-F6700-CFC\", \"sn\": \"SAL13516QS8\", \"vid\": \"V06\", } } }, } }", "3900 ISR', 'name': 'Cisco Services Performance Engine 150 for Cisco", "'FDO2028F1WK', 'curr_config_register': '0xF', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 26-Jun-13 09:56', 'hostname':", "X2-10GB-SR , VID: V06 , SN: ONT1702033D NAME: \"2\", DESCR:", "revision number : B0 Model number : WS-C3750X-24P-L Daughterboard assembly", "Invoked uSecs 5Sec 1Min 5Min TTY Process 368 362874 3321960", "with self.assertRaises(SchemaEmptyParserError): parsered_output = dir_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output)", "ShowVersionRp(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform(test_show_platform_iosxe): def test_empty(self):", "{ \"other\": { \"CLK-7600 2\": { \"name\": \"CLK-7600 2\", \"descr\":", ", VID: V01D , SN: DTN1504L0E9 NAME: \"TenGigabitEthernet1/1/1\", DESCR: \"SFP-10GBase-SR\"", "= Mock(**self.golden_output_4) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_4)", "'sample_switch', 'uptime': '8 weeks, 3 days, 10 hours, 27 minutes',", "Power Supply 1': { 'other': { 'C3900 AC Power Supply", "\"WS-SUP720 MSFC3 Daughterboard Rev. 
3.1\", \"name\": \"msfc sub-module of 1\",", "\"Cisco Services Performance Engine 150 for Cisco 3900 ISR on", "test_show_platform_software_status_control_processor_brief_iosxe,\\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\\ TestShowPlatformHardware as test_show_platform_hardware_iosxe,\\ TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\\", "Slot 1 SubSlot 1', 'pid': 'EM-HDA-6FXO', 'sn': 'FOC85389QXB', 'vid': 'V03", "'FD232323XXZ', 'vid': 'V02 ', }, 'PWR-C2-2929WAC': { 'descr': 'LLL Power", "EARL sub-module of 4\", \"pid\": \"WS-F6700-CFC\", \"sn\": \"SAL13516QS8\", \"vid\": \"V06\",", "self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) inventory_obj = ShowInventory(device=self.dev_iosv) parsed_output", "parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self):", "number : 800-32727-03 Daughterboard serial number : FDO172217ED System serial", "= None self.assertEqual( parsed_output, self.golden_parsed_output_slot_internal) def test_empty(self): self.device1 = Mock(**self.empty_output)", "\"subslot\": { \"0\": { \"WS-SUP720\": { \"descr\": \"WS-SUP720 MSFC3 Daughterboard", "and regulations. If you are unable to comply with U.S.", "test_empty(self): self.dev1 = Mock(**self.empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsered_output", "on Slot 0 SubSlot 1\", DESCR: \"Wan Interface Card BRI", "512K bytes of flash-simulated non-volatile configuration memory. 
Base ethernet MAC", "'name': 'Wan Interface Card BRI U (2091, 3086) on Slot", "Card BRI U (2091, 3086) on Slot 0 SubSlot 1\",", "'Virtual Ethernet/IEEE 802.3': '1' }, \"mem_size\": {\"non-volatile configuration\": \"1917\", \"packet", "- 8FXS/DID', 'name': 'High Density Voice Module - 8FXS/DID on", "\"Cisco Systems Catalyst 6500 3-slot Chassis System\", \"pid\": \"WS-C6503-E\", \"vid\":", "= ShowSwitch(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch_detail(test_show_switch_detail_iosxe): def test_empty(self):", "parsed_output = redundancy_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv =", "cryptographic features and is subject to United States and local", "'pid': 'WS-C0123X-45T-S', 'sn': 'FDO123R12W', 'subslot': { '1': { 'C3KX-PWR-350WAC': {", "def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(KeyError):", "WS-C3750X-24S-E Daughterboard assembly number : 800-32727-03 Daughterboard serial number :", "1917K bytes of non-volatile configuration memory. 8192K bytes of packet", "\"2\", DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 
2.6\"", "0 SubSlot 1', 'pid': 'WIC-1B-U-V2', 'sn': '10293847', 'vid': 'V01', },", "3086) on Slot 0 SubSlot 1\", DESCR: \"Wan Interface Card", "\"switchovers_system_experienced\": \"0\", \"available_system_uptime\": \"0 minutes\", \"communications\": \"Down\", \"hw_mode\": \"Simplex\", \"communications_reason\":", "C3900-SPE150/K9 FOC16050QP6 Technology Package License Information for Module:'c3900' ----------------------------------------------------------------- Technology", "}, 'other': { 'AIM-VPN/SSL-3': { 'descr': 'Encryption AIM Element', 'name':", "= Mock(**self.golden_output_7) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7)", "= ShowBootvar(device=self.dev_iosv) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev", "'15.2', 'platform': 'C3750E', 'version': '15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M', 'os': 'IOS', 'image_type':", "DRAM configuration is 72 bits wide with parity enabled. 255K", "bytes of memory. 
Processor board ID FGL161010K8 2 FastEthernet interfaces", "\"name\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2\",", "'Permanent', 'main_mem': '262144', 'mem_size': {'flash-simulated non-volatile configuration': '512'}, 'next_reload_license_level': 'ipservices',", ", VID: V03 , SN: QCS1604P0BT '''} golden_parsed_output_5 = {", "00:00:00 +00:00\", \"index\": \"267\", \"size\": \"147988420\", \"permissions\": \"-rw-\" } },", "'V04 ', }, }, }, }, } golden_output_9 = {'execute.return_value':", "\"other\": { \"PS 1 PWR-2700-AC/4\": { \"name\": \"PS 1 PWR-2700-AC/4\",", "as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\\ TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\\ TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\\ TestShowPlatformHardwareSerdesStatisticsInternal as", "\"pid\": \"X2-10GB-SR\", \"sn\": \"ONT170202T5\", \"vid\": \"V06 \", } }, \"5\":", "{ \"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"2\", \"descr\": \"WS-X6748-GE-TX CEF720", "'WIC-1B-U-V2', 'sn': '10293847', 'vid': 'V01', }, }, '4': { 'PVDM2-64':", "} }, \"4\": { \"lc\": { \"WS-X6748-GE-TX\": { \"name\": \"4\",", "SOFTWARE (fc1)', 'hostname': 'sample_switch', 'uptime': '8 weeks, 3 days, 10", "number : FDO1633Q14M Model revision number : A0 Motherboard revision", "parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_slot) def test_golden_subslot(self):", "TN3270 Emulation software. 
1 Virtual Ethernet/IEEE 802.3 interface 50 Gigabit", "1', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E', 'vid': 'V01 ', }, },", "ShowPlatformHardwarePlim,\\ ShowPlatformHardwareQfpBqsOpmMapping,\\ ShowPlatformHardwareQfpBqsIpmMapping,\\ ShowPlatformHardwareSerdes,\\ ShowPlatformHardwareSerdesInternal,\\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\\ ShowPlatformHardwareQfpStatisticsDrop,\\ ShowEnvironment,\\ ShowModule,\\", "def test_empty(self): self.dev1 = Mock(**self.empty_output) redundancy_obj = ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError):", "self.assertEqual(parsed_output, self.golden_parsed_output_slot) def test_golden_subslot(self): self.device = Mock(**self.golden_output_subslot) obj = ShowPlatformHardwarePlim(device=self.device)", "'vid': 'V01 ', }, }, '1': { 'VWIC2-2MFT-T1/E1': { 'descr':", "None self.device = Mock(**self.golden_output_active) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) parsed_output =", "'Switch 1 - Power Supply 1', 'pid': 'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9',", "obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output)", "\"five_min_cpu\": 9, \"one_min_cpu\": 4, \"nonzero_cpu_processes\": [ \"PIM Process\", \"IOSv e1000\",", "VID: V04, SN: SAL171848KL NAME: \"4\", DESCR: \"WS-X6748-GE-TX CEF720 48", "ShowVersionRp,\\ ShowPlatform,\\ ShowPlatformPower,\\ ShowProcessesCpuHistory,\\ ShowProcessesCpuPlatform,\\ ShowPlatformSoftwareStatusControl,\\ ShowPlatformSoftwareSlotActiveMonitorMem,\\ ShowPlatformHardware,\\ ShowPlatformHardwarePlim,\\ ShowPlatformHardwareQfpBqsOpmMapping,\\", "self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_empty(self): self.device1 = Mock(**self.empty_output)", 
"flash0:/ 1 drw- 0 Jan 30 2013 00:00:00 +00:00 boot", "TestShowInventory(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value':", "HWIC-2FE , VID: V02 , SN: FOC16062824 NAME: \"C3900 AC", "'V05 ', }, }, }, 'slot': { '0': { 'lc':", "'2', 'pid': 'WS-C3210X-48T-S', 'sn': 'FD5678Z90P', 'subslot': { '2': { 'C3KX-PWR-007CBA':", "self.golden_parsed_output_1) class test_show_processes_cpu(test_show_processes_cpu_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) obj =", "0.48, \"runtime\": 362874, \"pid\": 368, \"five_sec_cpu\": 1.03 }, 2: {", "memory. Processor board ID FXS1821Q2H9 SR71000 CPU at 600Mhz, Implementation", "'system_restarted_at': '05:06:40 GMT Tue Sep 10 2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason':", "}, }, } golden_output_7 = {'execute.return_value': ''' NAME: \"2821 chassis\",", "switchover reason = unsupported Hardware Mode = Simplex Maintenance Mode", "DESCR: \"WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 
2.6\" PID:", "ONT1702020H NAME: \"Transceiver Te2/3\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/3\" PID:", "\"pid\": \"WS-F6K-DFC4-E\", \"sn\": \"SAL171846RF\", \"vid\": \"V02\", } }, \"1\": {", "GMT Tue Sep 10 2019 System image file is \"flash:c3750e-universalk9-mz.152-2.E8.bin\"", "assembly number : 800-32727-03 Daughterboard serial number : FDO202823P8 System", "parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch(test_show_switch_iosxe): def test_empty(self): self.dev1 =", "9 weeks, 4 days, 2 hours, 3 minutes System returned", "cpu sorted 5min | inc CPU CPU utilization for five", "{ 'slot': { '1': { 'lc': { 'SM-ES2-16-P': { 'descr':", "05-Aug-11 00:32 by prod_rel_team ROM: System Bootstrap, Version 15.0(1r)M13, RELEASE", "1 - Power Supply 1\", DESCR: \"ABC Power Supply\" PID:", "self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) redundancy_obj = ShowRedundancy(device=self.dev_iosv) parsed_output", "Te2/6\", \"name\": \"Transceiver Te2/6\", \"pid\": \"X2-10GB-SR\", \"sn\": \"FNS153920YJ\", \"vid\": \"V06", "export, distribute or use encryption. Importers, exporters, distributors and users", "VID: V02 , SN: ONT182746GZ NAME: \"1\", DESCR: \"WS-C1010XR-48FPS-I\" PID:", "\"VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0\", \"name\": \"VS-F6K-PFC4 Policy", "SOFTWARE (fc1) BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE", "= None self.device = Mock(**self.golden_output_4) obj = ShowInventory(device=self.device) parsed_output =", "1\": { \"name\": \"FAN-MOD-4HS 1\", \"descr\": \"High Speed Fan Module", "'name': '2821 chassis', 'pid': 'CISCO2821', 'sn': 'FTX1234AMWT', 'vid': 'V07 ',", "Model revision number : W0 Motherboard revision number : B0", "1986-2018 by Cisco Systems, Inc. 
Compiled Mon 22-Jan-18 04:07 by", "'descr': 'Gigabit(1000BaseT) module for EtherSwitch NM', 'name': 'Gigabit(1000BaseT) module for", "= None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_empty(self): self.device1 = Mock(**self.empty_output) obj", "self.device = Mock(**self.golden_output_active) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) parsed_output = platform_obj.parse(status='active')", "platform_obj = ShowProcessesCpuHistory(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self):", "} }, }, } } }, \"2\": { \"lc\": {", "Card 4 Rev. 1.2\", \"name\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4", ": FDO2028F1WK Top Assembly Part Number : 800-38990-01 Top Assembly", "CISCO2821 , VID: V07 , SN: FTX1234AMWT NAME: \"VWIC2-2MFT-T1/E1 -", "= {'execute.return_value': ''' NAME: \"3825 chassis\", DESCR: \"3825 chassis\" PID:", "{ \"name\": \"FAN-MOD-4HS 1\", \"descr\": \"High Speed Fan Module for", "test_show_switch_detail(test_show_switch_detail_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitchDetail(device=self.dev1) with", "wide with parity enabled. 
255K bytes of non-volatile configuration memory.", "23, \"five_sec_cpu_interrupts\": 0 } golden_output = {'execute.return_value': '''\\ show processes", "'2' }, 'mem_size': { 'flash-simulated non-volatile configuration': '512' }, 'curr_config_register':", "obj.parse(slot='0') class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def test_golden(self): self.device = Mock(**self.golden_output_serdes_internal) obj =", "'NM-16ESW': { 'descr': '16 Port 10BaseT/100BaseTX EtherSwitch', 'name': '16 Port", "'descr': 'Two-Port Fast Ethernet High Speed WAN Interface Card', 'name':", "Chassis System\" PID: WS-C6503-E , VID: V03, SN: FXS1821Q2H9 NAME:", "Power Supply 1', 'pid': 'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R', 'vid': 'V01L ',", "NAME: \"Transceiver Te2/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/5\" PID: X2-10GB-SR", "{ \"version\": { \"last_reload_reason\": \"Unknown reason\", \"hostname\": \"N95_1\", \"os\": \"IOS\",", "Centralized Forwarding Card Rev. 
4.1\", \"name\": \"WS-F6700-CFC Centralized Forwarding Card", "WS-C0123X-45T-S , VID: V00 , SN: FDO123R12W NAME: \"Switch 1", "WAN Interface Card', 'name': 'Two-Port Fast Ethernet High Speed WAN", "\"cat6k_tb1\", \"uptime\": \"10 weeks, 5 days, 5 hours, 16 minutes\",", ", VID: V01, SN: 10293847 NAME: \"PVDMII DSP SIMM with", "'license_udi': { 'device_num': { '*0': { 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6'", "ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') self.assertEqual(parsed_output, self.golden_parsed_output)", "DESCR: \"16 Port 10BaseT/100BaseTX EtherSwitch\" PID: NM-16ESW , VID: V01", "restarted at 05:06:40 GMT Tue Sep 10 2019 System image", "VID: V02, SN: SAL17152QB3 NAME: \"WS-F6K-DFC4-E Distributed Forwarding Card 4", "= ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe):", "NAME: \"16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2\", DESCR: \"16", "V01, SN: 10293847 NAME: \"PVDMII DSP SIMM with four DSPs", "\"GigabitEthernet1/0/49\", DESCR: \"1000BaseSX SFP\" PID: GLC-SX-MMD , VID: V01 ,", "Number : 0x05 Switch Ports Model SW Version SW Image", "self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware(test_show_platform_hardware_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj", "{ 'main': { 'chassis': { 'CISCO3845': { 'descr': '3845 chassis',", "10Gbase-SR Te2/4\" PID: X2-10GB-SR , VID: V06 , SN: ONT170202T5", "'V01', }, }, }, '1': { 'lc': { 'NM-1T3/E3=': {", "= Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError): parsed_output = dir_obj.parse() def test_golden_iosv(self): self.maxDiff", "def test_empty(self): self.device1 = 
Mock(**self.empty_output) obj = ShowProcessesCpu(device=self.device1) with self.assertRaises(SchemaEmptyParserError):", "ROM by reload at 10:26:47 EST Mon Dec 9 2019", "obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase):", "self.dev1 = Mock(**self.empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(AttributeError): parsered_output =", "'''} golden_parsed_output_3 = { \"main\": { \"chassis\": { \"WS-C6503-E\": {", "+00:00\", \"index\": \"264\", \"size\": \"0\", \"permissions\": \"drw-\" }, \"nvram\": {", "\"WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4\", \"pid\": \"WS-F6700-CFC\",", "512K). Configuration register is 0x2102 '''} golden_output_ios_1 = {'execute.return_value': '''\\", "VID: V01D , SN: DTN1504L0E9 NAME: \"TenGigabitEthernet1/1/1\", DESCR: \"SFP-10GBase-SR\" PID:", "day, 16 hours, 42 minutes System returned to ROM by", "{ \"non-volatile configuration\": \"256\" }, \"system_image\": \"flash0:/vios-adventerprisek9-m\", \"curr_config_register\": \"0x0\", \"rom\":", "VID: V02 , SN: FOC16062824 NAME: \"C3900 AC Power Supply", "golden_output_4 = {'execute.return_value': ''' NAME: \"1\", DESCR: \"WS-C8888X-88\" PID: WS-C0123X-45T-S", "\"ABC Power Supply\" PID: C3KX-PWR-350WAC , VID: V01D , SN:", "= Mock(**self.empty_output) platform_obj = ShowModule(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse()", "{ 'AIM-VPN/SSL-3': { 'descr': 'Encryption AIM Element', 'name': 'Virtual Private", "self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdes(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output =", "}, }, }, }, '1': { 'other': { 'EVM-HD-8FXS/DID': {", "'Six port FXO voice interface daughtercard', 'name': 'Six port FXO", "\"e1000_bia.txt\": { \"last_modified_date\": \"Oct 17 2018 18:57:18 +00:00\", 
\"index\": \"269\",", "parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_serdes_internal) def", "engine sub-module of 1\", \"pid\": \"WS-F6K-PFC3BXL\", \"sn\": \"SAL11434LYG\", \"vid\": \"V01\",", "NAME: \"Switch 1 - FlexStackPlus Module\", DESCR: \"Stacking Module\" PID:", "Density Voice Module - 8FXS/DID on Slot 1', 'pid': 'EVM-HD-8FXS/DID',", "\"system_image\": \"flash0:/vios-adventerprisek9-m\", \"curr_config_register\": \"0x0\", \"rom\": \"Bootstrap program is IOSv\", \"uptime\":", "12.2(18)SXF7, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2006", "Te2/5\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/5\" PID: X2-10GB-SR , VID:", "\"1989595136\" }, \"dir\": \"flash0:/\" } } golden_output_iosv = {'execute.return_value': '''\\", "\"chassis\": { \"WS-C6503-E\": { \"name\": \"WS-C6503-E\", \"descr\": \"Cisco Systems Catalyst", "with Gigabit Ethernet\" PID: CISCO3845-MB , VID: V09 , SN:", "Performance Engine 150 for Cisco 3900 ISR', 'name': 'Cisco Services", "FDO1633Q14M Model revision number : A0 Motherboard revision number :", "[ \"PIM Process\", \"IOSv e1000\", \"OSPF-1 Hello\" ], \"five_sec_cpu_interrupts\": 0", "DESCR: \"SM-ES2-16-P\" PID: SM-ES2-16-P , VID: , SN: FOC09876NP3 '''}", "ShowSwitchDetail(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff =", "Ethernet Rev. 
3.4\" PID: WS-X6748-GE-TX , VID: V04, SN: SAL14017TWF", "{ 'descr': '16 Port 10BaseT/100BaseTX EtherSwitch', 'name': '16 Port 10BaseT/100BaseTX", "= ShowProcessesCpuPlatform(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = cpu_platform_obj.parse() class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def", "'hostname': 'sample_switch', 'uptime': '8 weeks, 3 days, 10 hours, 27", "self.assertEqual(parsed_output, self.golden_parsed_output_1) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowProcessesCpu(device=self.device1)", "}, }, } golden_output_8 = {'execute.return_value': ''' NAME: \"3825 chassis\",", "SAL1214LAG5 NAME: \"WS-C6503-E-FAN 1\", DESCR: \"Enhanced 3-slot Fan Tray 1\"", "26-Jun-13 09:56 by prod_rel_team Image text-base: 0x00003000, data-base: 0x02800000 ROM:", "uptime is 8 weeks, 3 days, 10 hours, 27 minutes", "DESCR: \"X2 Transceiver 10Gbase-SR Te2/4\" PID: X2-10GB-SR , VID: V06", "\"IOSv e1000\", \"OSPF-1 Hello\" ], \"five_sec_cpu_interrupts\": 0 } golden_output_1 =", "System restarted at 12:22:21 PDT Mon Sep 10 2018 System", "\"name\": \"VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1\",", "dir_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) dir_obj = Dir(device=self.dev1) with", "= None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitchDetail(device=self.dev_c3850) parsed_output =", "SAL1128UPQ9 NAME: \"switching engine sub-module of 2\", DESCR: \"WS-F6700-DFC3CXL Distributed", "\"VS-F6K-MSFC5 CPU Daughterboard Rev. 
2.0\", \"name\": \"msfc sub-module of 1\",", "{ \"other\": { \"PS 1 PWR-2700-AC/4\": { \"name\": \"PS 1", "' '15.2(3r)E, RELEASE SOFTWARE (fc1)', 'chassis': 'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK', 'curr_config_register':", "self.golden_parsed_output_slot) def test_golden_subslot(self): self.device = Mock(**self.golden_output_subslot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output", "{ \"flash0:/\": { \"files\": { \"e1000_bia.txt\": { \"last_modified_date\": \"Oct 17", "V05 , SN: FTX6666ARJ9 NAME: \"c3845 Motherboard with Gigabit Ethernet", "obj = ShowProcessesCpuSorted(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self):", "\"2\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR Te2/2\", \"name\":", "'sn': 'ONT182746GZ', 'vid': 'V02 ', }, }, }, 'vid': 'V02", "IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2) Technical Support:", "\"IOS\", \"version_short\": \"15.6\", \"number_of_intfs\": { \"Gigabit Ethernet\": \"6\" }, \"version\":", "Ethernet': '2' }, 'mem_size': { 'flash-simulated non-volatile configuration': '512' },", "System restarted at 10:27:57 EST Mon Dec 9 2019 System", "\"rtr_type\": \"IOSv\", \"chassis_sn\": \"9K66Z7TOKAACDEQA24N7S\", \"chassis\": \"IOSv\", \"image_id\": \"VIOS-ADVENTERPRISEK9-M\", 'compiled_by': 'prod_rel_team',", "ShowBootvar, \\ ShowProcessesCpuSorted,\\ ShowProcessesCpu,\\ ShowVersionRp,\\ ShowPlatform,\\ ShowPlatformPower,\\ ShowProcessesCpuHistory,\\ ShowProcessesCpuPlatform,\\ ShowPlatformSoftwareStatusControl,\\", "RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2017 by", "dir_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase): dev1 = Device(name='empty') dev_iosv =", "4 Rev. 
1.2\", \"name\": \"WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL", "= None self.device = Mock(**self.golden_output_3) obj = ShowInventory(device=self.device) parsed_output =", "0.15% 0.07% 0.07% 0 OSPF-1 Hello '''} def test_empty(self): self.dev", "Slot 0 SubSlot 3\", DESCR: \"Two-Port Fast Ethernet High Speed", "{ 'device_num': { '*0': { 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6' }", "VID: V01, SN: DCH170900PF NAME: \"PS 1 PWR-2700-AC/4\", DESCR: \"2700W", "Mock(**self.golden_output_6) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6) def", "Sep 10 2018 System image file is \"flash:c3750e-universalk9-mz\" This product", "def test_golden_1(self): self.maxDiff = None self.dev = Mock(**self.golden_output_1) obj =", "SN: FOC63358WSI NAME: \"High Density Voice Module - 8FXS/DID on", "Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) cat6k_tb1 uptime is", "None self.dev_iosv = Mock(**self.golden_output_iosv) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse()", "supply, 1400 watt 1\" PID: PWR-1400-AC , VID: V01, SN:", "PID: CISCO3945-CHASSIS , VID: V05 , SN: FGL161010K8 NAME: \"Cisco", "Supply\" PID: C3KX-PWR-350WAC , VID: V01D , SN: DTN1504L0E9 NAME:", "= Device(name='ios') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv", "Mock(**self.golden_output_iosv) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def", "device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def", "bytes of Flash internal SIMM (Sector size 512K). 
Configuration register", "\"name\": \"WS-C6503-E-FAN 1\", \"descr\": \"Enhanced 3-slot Fan Tray 1\", \"pid\":", "Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0\" PID: IOSv , VID: 1.0,", "= Mock(**self.golden_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output)", "Sep 10 2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type':", "V01 , SN: FOC98675W3E NAME: \"Virtual Private Network (VPN) Module", "Gigabit Ethernet', 'name': 'c3845 Motherboard with Gigabit Ethernet on Slot", "sorted 5min | inc CPU CPU utilization for five seconds:", "\"Transceiver Te2/3\", DESCR: \"X2 Transceiver 10Gbase-SR Te2/3\" PID: X2-10GB-SR ,", "(2091, 3086)', 'name': 'Wan Interface Card BRI U (2091, 3086)", "processor (revision 1.4) with 983008K/65536K bytes of memory. Processor board", "1Min 5Min TTY Process 368 362874 3321960 109 1.03% 0.54%", "\"VS-F6K-PFC4\": { \"descr\": \"VS-F6K-PFC4 Policy Feature Card 4 Rev. 
2.0\",", "\"standby_failures\": \"0\" }, \"slot\": { \"slot 0\": { \"image_ver\": \"Cisco", "0', 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6', 'subslot': { '3': { 'HWIC-2FE':", "'FD2043B0K3', 'subslot': { '1': { 'C1010X-STACK': { 'descr': 'Stacking Module',", "'IOS', 'platform': 'C3750E', 'processor_type': 'PowerPC405', 'returned_to_rom_by': 'power-on', 'rom': 'Bootstrap program", "AIM-VPN/SSL-2 , VID: V01, SN: FOC2837465E '''} golden_parsed_output_7 = {", "(2091, 3086) on Slot 0 SubSlot 1\", DESCR: \"Wan Interface", "1\", \"pid\": \"CLK-7600\", \"vid\": \"\", \"sn\": \"FXS181101V4\", } } },", "PID: WS-C1010XR-48FPS-I, VID: V05 , SN: FD2043B0K3 NAME: \"Switch 1", "Performance Engine 150 for Cisco 3900 ISR on Slot 0',", "Daughterboard serial number : FDO202823P8 System serial number : FDO2028F1WK", "5.6\", \"pid\": \"WS-SUP720-3BXL\", \"vid\": \"V05\", \"sn\": \"SAL11434P2C\", \"subslot\": { \"0\":", "self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj = ShowPlatformPower(device=self.device) parsed_output", "self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch_detail(test_show_switch_detail_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj =", "obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None", "Peer (slot: 0) information is not available because it is", "}, 'PWR-C2-2929WAC': { 'descr': 'LLL Power Supply', 'name': 'Switch 1", "with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def test_golden(self): self.device", "processor (revision W0) with 262144K bytes of memory. 
Processor board", "Ethernet': '3', }, 'os': 'IOS', 'platform': 'C3900', 'processor_board_flash': '2000880K', 'processor_type':", "self.assertEqual(parsed_output, self.parsed_output) class test_dir(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv')", "}, }, \"4\": { \"X2-10GB-SR\": { \"descr\": \"X2 Transceiver 10Gbase-SR", "4-slot Chassis System\" PID: WS-C6504-E , VID: V01, SN: FXS1712Q1R8", "test_show_platform_hardware_qfp_statistics_drop_iosxe,\\ TestShowEnv as test_show_env_iosxe,\\ TestShowModule as test_show_module_iosxe,\\ TestShowSwitch as test_show_switch_iosxe,\\" ]
[ "param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params) ts_set = load_timeseries(build_rel_path(\"timeseries.json\")) notifications_ts = ts_set[\"notifications\"].rolling(7).mean().loc[350::7]", "API. import json plot_spec_filepath = build_rel_path(\"timeseries.json\") with open(plot_spec_filepath) as f:", "UniformPrior, BetaPrior,TruncNormalPrior from autumn.calibration.targets import ( NormalTarget, get_dispersion_priors_for_gaussian_targets, ) from", "Dispersion parameters based on targets *get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets), # Regional parameters", "np from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning from autumn.core.project import Project, ParameterSet,", "use_tuned_proposal_sds(priors, build_rel_path(\"proposal_sds.yml\")) calibration = Calibration(priors, targets) # FIXME: Replace with", "from autumn.settings import Region, Models from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts #", "scenarios=scenario_params) ts_set = load_timeseries(build_rel_path(\"timeseries.json\")) notifications_ts = ts_set[\"notifications\"].rolling(7).mean().loc[350::7] death_ts = ts_set[\"infection_deaths\"].loc[350:]", "BetaPrior,TruncNormalPrior from autumn.calibration.targets import ( NormalTarget, get_dispersion_priors_for_gaussian_targets, ) from autumn.models.covid_19", "for sc_dict in all_scenario_dicts] param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params) ts_set =", "*get_dispersion_priors_for_gaussian_targets(targets), # Regional parameters UniformPrior(\"contact_rate\", [0.024, 0.027]), UniformPrior(\"infectious_seed\", [275.0, 450.0]),", "for i in range(7, 9)] mle_path = build_rel_path(\"params/mle-params.yml\") baseline_params =", "from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning from autumn.core.project import Project, ParameterSet, 
load_timeseries,", "ts_set[\"notifications\"].rolling(7).mean().loc[350::7] death_ts = ts_set[\"infection_deaths\"].loc[350:] targets = [ NormalTarget(notifications_ts), NormalTarget(death_ts), ]", "autumn.calibration.targets import ( NormalTarget, get_dispersion_priors_for_gaussian_targets, ) from autumn.models.covid_19 import base_params,", "# Load and configure model parameters. default_path = build_rel_path(\"params/default.yml\") #scenario_paths", "[475, 530]), UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\", [8.5, 11.5]), ] # Load proposal sds", "build_model, param_set, calibration, plots=plot_spec ) #perform_all_params_proposal_tuning(project, calibration, priors, n_points=50, relative_likelihood_reduction=0.2)", "import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \\ use_tuned_proposal_sds from autumn.calibration", ") from autumn.models.covid_19 import base_params, build_model from autumn.settings import Region,", "Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration, plots=plot_spec ) #perform_all_params_proposal_tuning(project, calibration, priors,", "calibration_format=True) all_scenario_dicts = get_all_scenario_dicts(\"LKA\") #scenario_params = [baseline_params.update(p) for p in", "= base_params.update(default_path).update(mle_path, calibration_format=True) all_scenario_dicts = get_all_scenario_dicts(\"LKA\") #scenario_params = [baseline_params.update(p) for", "autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path,", "p in scenario_paths] scenario_params = [baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts]", "] priors = [ # Dispersion parameters based on targets", "build_model from autumn.settings import Region, Models from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts", "load_timeseries, build_rel_path, 
get_all_available_scenario_paths, \\ use_tuned_proposal_sds from autumn.calibration import Calibration from", "parameters UniformPrior(\"contact_rate\", [0.024, 0.027]), UniformPrior(\"infectious_seed\", [275.0, 450.0]), # Detection UniformPrior(\"testing_to_detection.assumed_cdr_parameter\",", "= build_rel_path(\"params/mle-params.yml\") baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True) all_scenario_dicts = get_all_scenario_dicts(\"LKA\") #scenario_params", "[ # Dispersion parameters based on targets *get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets), #", "sds from yml file # use_tuned_proposal_sds(priors, build_rel_path(\"proposal_sds.yml\")) calibration = Calibration(priors,", "\\ use_tuned_proposal_sds from autumn.calibration import Calibration from autumn.calibration.priors import UniformPrior,", "model parameters. default_path = build_rel_path(\"params/default.yml\") #scenario_paths = [build_rel_path(f\"params/scenario-{i}.yml\") for i", "targets *get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets), # Regional parameters UniformPrior(\"contact_rate\", [0.024, 0.027]), UniformPrior(\"infectious_seed\",", "in all_scenario_dicts] param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params) ts_set = load_timeseries(build_rel_path(\"timeseries.json\")) notifications_ts", "i in range(7, 9)] mle_path = build_rel_path(\"params/mle-params.yml\") baseline_params = base_params.update(default_path).update(mle_path,", "on targets *get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets), # Regional parameters UniformPrior(\"contact_rate\", [0.024, 0.027]),", "parameters. 
default_path = build_rel_path(\"params/default.yml\") #scenario_paths = [build_rel_path(f\"params/scenario-{i}.yml\") for i in", "NormalTarget, get_dispersion_priors_for_gaussian_targets, ) from autumn.models.covid_19 import base_params, build_model from autumn.settings", "Load proposal sds from yml file # use_tuned_proposal_sds(priors, build_rel_path(\"proposal_sds.yml\")) calibration", "configure model parameters. default_path = build_rel_path(\"params/default.yml\") #scenario_paths = [build_rel_path(f\"params/scenario-{i}.yml\") for", "from yml file # use_tuned_proposal_sds(priors, build_rel_path(\"proposal_sds.yml\")) calibration = Calibration(priors, targets)", "= build_rel_path(\"timeseries.json\") with open(plot_spec_filepath) as f: plot_spec = json.load(f) project", "= Project( Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration, plots=plot_spec ) #perform_all_params_proposal_tuning(project,", "get_dispersion_priors_for_gaussian_targets, ) from autumn.models.covid_19 import base_params, build_model from autumn.settings import", "yml file # use_tuned_proposal_sds(priors, build_rel_path(\"proposal_sds.yml\")) calibration = Calibration(priors, targets) #", "FIXME: Replace with flexible Python plot request API. 
import json", "= ParameterSet(baseline=baseline_params, scenarios=scenario_params) ts_set = load_timeseries(build_rel_path(\"timeseries.json\")) notifications_ts = ts_set[\"notifications\"].rolling(7).mean().loc[350::7] death_ts", "as np from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning from autumn.core.project import Project,", "0.027]), UniformPrior(\"infectious_seed\", [275.0, 450.0]), # Detection UniformPrior(\"testing_to_detection.assumed_cdr_parameter\", [0.009, 0.025]), UniformPrior(\"infection_fatality.multiplier\",", "NormalTarget(death_ts), ] priors = [ # Dispersion parameters based on", "from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts # Load and configure model parameters.", "import numpy as np from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning from autumn.core.project", "and configure model parameters. default_path = build_rel_path(\"params/default.yml\") #scenario_paths = [build_rel_path(f\"params/scenario-{i}.yml\")", "death_ts = ts_set[\"infection_deaths\"].loc[350:] targets = [ NormalTarget(notifications_ts), NormalTarget(death_ts), ] priors", "autumn.settings import Region, Models from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts # Load", "all_scenario_dicts] param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params) ts_set = load_timeseries(build_rel_path(\"timeseries.json\")) notifications_ts =", "= Calibration(priors, targets) # FIXME: Replace with flexible Python plot", "import Calibration from autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior from autumn.calibration.targets import", "[370, 410]), UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\", [3.2, 4.5]), UniformPrior(\"voc_emergence.delta.start_time\", [475, 530]), UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\", [8.5,", "proposal sds from yml file # 
use_tuned_proposal_sds(priors, build_rel_path(\"proposal_sds.yml\")) calibration =", "autumn.calibration import Calibration from autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior from autumn.calibration.targets", "for p in scenario_paths] scenario_params = [baseline_params.update(sc_dict) for sc_dict in", "autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior from autumn.calibration.targets import ( NormalTarget, get_dispersion_priors_for_gaussian_targets,", "[build_rel_path(f\"params/scenario-{i}.yml\") for i in range(7, 9)] mle_path = build_rel_path(\"params/mle-params.yml\") baseline_params", "Detection UniformPrior(\"testing_to_detection.assumed_cdr_parameter\", [0.009, 0.025]), UniformPrior(\"infection_fatality.multiplier\", [0.09, 0.13]), #VoC UniformPrior(\"voc_emergence.alpha_beta.start_time\", [370,", "UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\", [3.2, 4.5]), UniformPrior(\"voc_emergence.delta.start_time\", [475, 530]), UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\", [8.5, 11.5]), ]", "ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \\ use_tuned_proposal_sds from autumn.calibration import Calibration", "[0.024, 0.027]), UniformPrior(\"infectious_seed\", [275.0, 450.0]), # Detection UniformPrior(\"testing_to_detection.assumed_cdr_parameter\", [0.009, 0.025]),", "in scenario_paths] scenario_params = [baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts] param_set", "import ( NormalTarget, get_dispersion_priors_for_gaussian_targets, ) from autumn.models.covid_19 import base_params, build_model", "base_params.update(default_path).update(mle_path, calibration_format=True) all_scenario_dicts = get_all_scenario_dicts(\"LKA\") #scenario_params = [baseline_params.update(p) for p", "#scenario_params = [baseline_params.update(p) for p in scenario_paths] scenario_params = [baseline_params.update(sc_dict)", "NormalTarget(notifications_ts), 
NormalTarget(death_ts), ] priors = [ # Dispersion parameters based", "import Region, Models from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts # Load and", "450.0]), # Detection UniformPrior(\"testing_to_detection.assumed_cdr_parameter\", [0.009, 0.025]), UniformPrior(\"infection_fatality.multiplier\", [0.09, 0.13]), #VoC", "calibration = Calibration(priors, targets) # FIXME: Replace with flexible Python", "range(7, 9)] mle_path = build_rel_path(\"params/mle-params.yml\") baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True) all_scenario_dicts", "UniformPrior(\"voc_emergence.alpha_beta.start_time\", [370, 410]), UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\", [3.2, 4.5]), UniformPrior(\"voc_emergence.delta.start_time\", [475, 530]), UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\",", "import base_params, build_model from autumn.settings import Region, Models from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder", "in range(7, 9)] mle_path = build_rel_path(\"params/mle-params.yml\") baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True)", "json.load(f) project = Project( Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration, plots=plot_spec", "from autumn.calibration.targets import ( NormalTarget, get_dispersion_priors_for_gaussian_targets, ) from autumn.models.covid_19 import", "= load_timeseries(build_rel_path(\"timeseries.json\")) notifications_ts = ts_set[\"notifications\"].rolling(7).mean().loc[350::7] death_ts = ts_set[\"infection_deaths\"].loc[350:] targets =", "mle_path = build_rel_path(\"params/mle-params.yml\") baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True) all_scenario_dicts = get_all_scenario_dicts(\"LKA\")", "Region, Models from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts # Load 
and configure", "all_scenario_dicts = get_all_scenario_dicts(\"LKA\") #scenario_params = [baseline_params.update(p) for p in scenario_paths]", "sc_dict in all_scenario_dicts] param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params) ts_set = load_timeseries(build_rel_path(\"timeseries.json\"))", "priors = [ # Dispersion parameters based on targets *get_dispersion_priors_for_gaussian_targets(targets),", "UniformPrior(\"testing_to_detection.assumed_cdr_parameter\", [0.009, 0.025]), UniformPrior(\"infection_fatality.multiplier\", [0.09, 0.13]), #VoC UniformPrior(\"voc_emergence.alpha_beta.start_time\", [370, 410]),", "Calibration from autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior from autumn.calibration.targets import (", "Load and configure model parameters. default_path = build_rel_path(\"params/default.yml\") #scenario_paths =", "= ts_set[\"infection_deaths\"].loc[350:] targets = [ NormalTarget(notifications_ts), NormalTarget(death_ts), ] priors =", "[0.09, 0.13]), #VoC UniformPrior(\"voc_emergence.alpha_beta.start_time\", [370, 410]), UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\", [3.2, 4.5]), UniformPrior(\"voc_emergence.delta.start_time\",", "from autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior from autumn.calibration.targets import ( NormalTarget,", "f: plot_spec = json.load(f) project = Project( Region.SRI_LANKA, Models.COVID_19, build_model,", "# Regional parameters UniformPrior(\"contact_rate\", [0.024, 0.027]), UniformPrior(\"infectious_seed\", [275.0, 450.0]), #", "build_rel_path(\"proposal_sds.yml\")) calibration = Calibration(priors, targets) # FIXME: Replace with flexible", "0.13]), #VoC UniformPrior(\"voc_emergence.alpha_beta.start_time\", [370, 410]), UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\", [3.2, 4.5]), UniformPrior(\"voc_emergence.delta.start_time\", [475,", "UniformPrior(\"infectious_seed\", [275.0, 450.0]), # Detection 
UniformPrior(\"testing_to_detection.assumed_cdr_parameter\", [0.009, 0.025]), UniformPrior(\"infection_fatality.multiplier\", [0.09,", "*get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets), # Regional parameters UniformPrior(\"contact_rate\", [0.024, 0.027]), UniformPrior(\"infectious_seed\", [275.0,", "UniformPrior(\"infection_fatality.multiplier\", [0.09, 0.13]), #VoC UniformPrior(\"voc_emergence.alpha_beta.start_time\", [370, 410]), UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\", [3.2, 4.5]),", "request API. import json plot_spec_filepath = build_rel_path(\"timeseries.json\") with open(plot_spec_filepath) as", "= json.load(f) project = Project( Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration,", "0.025]), UniformPrior(\"infection_fatality.multiplier\", [0.09, 0.13]), #VoC UniformPrior(\"voc_emergence.alpha_beta.start_time\", [370, 410]), UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\", [3.2,", "= ts_set[\"notifications\"].rolling(7).mean().loc[350::7] death_ts = ts_set[\"infection_deaths\"].loc[350:] targets = [ NormalTarget(notifications_ts), NormalTarget(death_ts),", "with flexible Python plot request API. 
import json plot_spec_filepath =", "import UniformPrior, BetaPrior,TruncNormalPrior from autumn.calibration.targets import ( NormalTarget, get_dispersion_priors_for_gaussian_targets, )", "= [ # Dispersion parameters based on targets *get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets),", "[0.009, 0.025]), UniformPrior(\"infection_fatality.multiplier\", [0.09, 0.13]), #VoC UniformPrior(\"voc_emergence.alpha_beta.start_time\", [370, 410]), UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\",", "default_path = build_rel_path(\"params/default.yml\") #scenario_paths = [build_rel_path(f\"params/scenario-{i}.yml\") for i in range(7,", "from autumn.calibration import Calibration from autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior from", "#VoC UniformPrior(\"voc_emergence.alpha_beta.start_time\", [370, 410]), UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\", [3.2, 4.5]), UniformPrior(\"voc_emergence.delta.start_time\", [475, 530]),", "perform_all_params_proposal_tuning from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \\", "= [build_rel_path(f\"params/scenario-{i}.yml\") for i in range(7, 9)] mle_path = build_rel_path(\"params/mle-params.yml\")", "ts_set = load_timeseries(build_rel_path(\"timeseries.json\")) notifications_ts = ts_set[\"notifications\"].rolling(7).mean().loc[350::7] death_ts = ts_set[\"infection_deaths\"].loc[350:] targets", "#scenario_paths = [build_rel_path(f\"params/scenario-{i}.yml\") for i in range(7, 9)] mle_path =", "# use_tuned_proposal_sds(priors, build_rel_path(\"proposal_sds.yml\")) calibration = Calibration(priors, targets) # FIXME: Replace", "build_rel_path(\"params/default.yml\") #scenario_paths = [build_rel_path(f\"params/scenario-{i}.yml\") for i in range(7, 9)] mle_path", "530]), UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\", [8.5, 11.5]), ] # 
Load proposal sds from", "UniformPrior(\"voc_emergence.delta.start_time\", [475, 530]), UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\", [8.5, 11.5]), ] # Load proposal", "<filename>autumn/projects/covid_19/sri_lanka/sri_lanka/project.py import numpy as np from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning from", "get_all_scenario_dicts(\"LKA\") #scenario_params = [baseline_params.update(p) for p in scenario_paths] scenario_params =", "= [ NormalTarget(notifications_ts), NormalTarget(death_ts), ] priors = [ # Dispersion", "autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts # Load and configure model parameters. default_path", "with open(plot_spec_filepath) as f: plot_spec = json.load(f) project = Project(", "based on targets *get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets), # Regional parameters UniformPrior(\"contact_rate\", [0.024,", "from autumn.models.covid_19 import base_params, build_model from autumn.settings import Region, Models", "baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True) all_scenario_dicts = get_all_scenario_dicts(\"LKA\") #scenario_params = [baseline_params.update(p)", "ParameterSet(baseline=baseline_params, scenarios=scenario_params) ts_set = load_timeseries(build_rel_path(\"timeseries.json\")) notifications_ts = ts_set[\"notifications\"].rolling(7).mean().loc[350::7] death_ts =", "autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \\ use_tuned_proposal_sds from", "[baseline_params.update(p) for p in scenario_paths] scenario_params = [baseline_params.update(sc_dict) for sc_dict", "targets) # FIXME: Replace with flexible Python plot request API.", "project = Project( Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration, plots=plot_spec )", "ts_set[\"infection_deaths\"].loc[350:] 
targets = [ NormalTarget(notifications_ts), NormalTarget(death_ts), ] priors = [", "Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \\ use_tuned_proposal_sds from autumn.calibration import", "open(plot_spec_filepath) as f: plot_spec = json.load(f) project = Project( Region.SRI_LANKA,", "from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \\ use_tuned_proposal_sds", "get_all_available_scenario_paths, \\ use_tuned_proposal_sds from autumn.calibration import Calibration from autumn.calibration.priors import", "Regional parameters UniformPrior(\"contact_rate\", [0.024, 0.027]), UniformPrior(\"infectious_seed\", [275.0, 450.0]), # Detection", "[275.0, 450.0]), # Detection UniformPrior(\"testing_to_detection.assumed_cdr_parameter\", [0.009, 0.025]), UniformPrior(\"infection_fatality.multiplier\", [0.09, 0.13]),", "scenario_params = [baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts] param_set = ParameterSet(baseline=baseline_params,", "[8.5, 11.5]), ] # Load proposal sds from yml file", "Models.COVID_19, build_model, param_set, calibration, plots=plot_spec ) #perform_all_params_proposal_tuning(project, calibration, priors, n_points=50,", "= get_all_scenario_dicts(\"LKA\") #scenario_params = [baseline_params.update(p) for p in scenario_paths] scenario_params", "Models from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts # Load and configure model", "= [baseline_params.update(p) for p in scenario_paths] scenario_params = [baseline_params.update(sc_dict) for", "build_rel_path(\"timeseries.json\") with open(plot_spec_filepath) as f: plot_spec = json.load(f) project =", "# FIXME: Replace with flexible Python plot request API. 
import", "4.5]), UniformPrior(\"voc_emergence.delta.start_time\", [475, 530]), UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\", [8.5, 11.5]), ] # Load", "load_timeseries(build_rel_path(\"timeseries.json\")) notifications_ts = ts_set[\"notifications\"].rolling(7).mean().loc[350::7] death_ts = ts_set[\"infection_deaths\"].loc[350:] targets = [", "= [baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts] param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params)", "use_tuned_proposal_sds from autumn.calibration import Calibration from autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior", "Calibration(priors, targets) # FIXME: Replace with flexible Python plot request", "import json plot_spec_filepath = build_rel_path(\"timeseries.json\") with open(plot_spec_filepath) as f: plot_spec", "= build_rel_path(\"params/default.yml\") #scenario_paths = [build_rel_path(f\"params/scenario-{i}.yml\") for i in range(7, 9)]", "parameters based on targets *get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets), # Regional parameters UniformPrior(\"contact_rate\",", "[3.2, 4.5]), UniformPrior(\"voc_emergence.delta.start_time\", [475, 530]), UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\", [8.5, 11.5]), ] #", "json plot_spec_filepath = build_rel_path(\"timeseries.json\") with open(plot_spec_filepath) as f: plot_spec =", "autumn.models.covid_19 import base_params, build_model from autumn.settings import Region, Models from", "flexible Python plot request API. 
import json plot_spec_filepath = build_rel_path(\"timeseries.json\")", "( NormalTarget, get_dispersion_priors_for_gaussian_targets, ) from autumn.models.covid_19 import base_params, build_model from", "build_rel_path, get_all_available_scenario_paths, \\ use_tuned_proposal_sds from autumn.calibration import Calibration from autumn.calibration.priors", "plot_spec = json.load(f) project = Project( Region.SRI_LANKA, Models.COVID_19, build_model, param_set,", "UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\", [8.5, 11.5]), ] # Load proposal sds from yml", "plot request API. import json plot_spec_filepath = build_rel_path(\"timeseries.json\") with open(plot_spec_filepath)", "11.5]), ] # Load proposal sds from yml file #", "9)] mle_path = build_rel_path(\"params/mle-params.yml\") baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True) all_scenario_dicts =", "UniformPrior(\"contact_rate\", [0.024, 0.027]), UniformPrior(\"infectious_seed\", [275.0, 450.0]), # Detection UniformPrior(\"testing_to_detection.assumed_cdr_parameter\", [0.009,", "scenario_paths] scenario_params = [baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts] param_set =", "[ NormalTarget(notifications_ts), NormalTarget(death_ts), ] priors = [ # Dispersion parameters", "# Load proposal sds from yml file # use_tuned_proposal_sds(priors, build_rel_path(\"proposal_sds.yml\"))", "import get_all_scenario_dicts # Load and configure model parameters. default_path =", "targets = [ NormalTarget(notifications_ts), NormalTarget(death_ts), ] priors = [ #", "] # Load proposal sds from yml file # use_tuned_proposal_sds(priors,", "base_params, build_model from autumn.settings import Region, Models from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import", "Replace with flexible Python plot request API. import json plot_spec_filepath", "get_all_scenario_dicts # Load and configure model parameters. 
default_path = build_rel_path(\"params/default.yml\")", "build_rel_path(\"params/mle-params.yml\") baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True) all_scenario_dicts = get_all_scenario_dicts(\"LKA\") #scenario_params =", "Project( Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration, plots=plot_spec ) #perform_all_params_proposal_tuning(project, calibration,", "as f: plot_spec = json.load(f) project = Project( Region.SRI_LANKA, Models.COVID_19,", "notifications_ts = ts_set[\"notifications\"].rolling(7).mean().loc[350::7] death_ts = ts_set[\"infection_deaths\"].loc[350:] targets = [ NormalTarget(notifications_ts),", "[baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts] param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params) ts_set", "# Dispersion parameters based on targets *get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets), # Regional", "import perform_all_params_proposal_tuning from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths,", "# Detection UniformPrior(\"testing_to_detection.assumed_cdr_parameter\", [0.009, 0.025]), UniformPrior(\"infection_fatality.multiplier\", [0.09, 0.13]), #VoC UniformPrior(\"voc_emergence.alpha_beta.start_time\",", "file # use_tuned_proposal_sds(priors, build_rel_path(\"proposal_sds.yml\")) calibration = Calibration(priors, targets) # FIXME:", "Python plot request API. 
import json plot_spec_filepath = build_rel_path(\"timeseries.json\") with", "plot_spec_filepath = build_rel_path(\"timeseries.json\") with open(plot_spec_filepath) as f: plot_spec = json.load(f)", "numpy as np from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning from autumn.core.project import", "410]), UniformPrior(\"voc_emergence.alpha_beta.contact_rate_multiplier\", [3.2, 4.5]), UniformPrior(\"voc_emergence.delta.start_time\", [475, 530]), UniformPrior(\"voc_emergence.delta.contact_rate_multiplier\", [8.5, 11.5])," ]
[ "'Subtheme renamed' and the Subtheme name has been changed \"\"\"", "= SubTheme.get_by_name('_TEST_SUB_THEME_') if not subtheme: subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_') subtheme.save()", "test :return: An authorization header \"\"\" response_login = self.client.post('/login', data=dict(email=self.user.email,", "user = Users(\"Admin\", \"<EMAIL>\", password_hash.decode(\"utf8\"), True, True) try: user.save() user.commit()", "unittest import TestCase import bcrypt from flask.ctx import AppContext from", "headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def tearDown(self): \"\"\" Handle the cleanup after", "= self.client.post('/admin/themes/delete_subtheme', json={\"name\": \"weA_gfj24fhurtyui\", \"theme_id\": -1}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def", "= SubTheme(self.theme.id, '_TEST_SUB_THEME_') subtheme.save() subtheme.commit() subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') return subtheme", "current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_non_existant_subtheme(self): \"\"\" Rename a SubTheme that", "= Theme.get_by_name(\"_test_add_Subtheme_\") if not self.theme: self.theme = Theme(\"_test_add_Subtheme_\") self.theme.save() self.theme.commit()", "except Exception as e: pass return user def get_auth_header(self) ->", "\"\"\" self.client, self.app_context = self.create_test_client() self.user = self.create_admin_user() self.auth_header =", "tests \"\"\" subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') if not subtheme: subtheme =", "if not subtheme: subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_') subtheme.save() subtheme.commit() subtheme", "a SubTheme by id and check the clients response status", "not exist and check the client response status code for", "test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True) testing_client = test_app.test_client() test_app_context = 
test_app.app_context()", "a dummy theme \"\"\" self.client, self.app_context = self.create_test_client() self.user =", "\"current_name\": current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) response =", "deleting of Themes \"\"\" def setUp(self): \"\"\" Setup a FlaskClient", "= create_app(DATABASE_NAME='test_analysis', TESTING=True) testing_client = test_app.test_client() test_app_context = test_app.app_context() test_app_context.push()", "= self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"id\": self.subtheme.id}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)", "check the client response status code for http status 200", "by id and check the client response status code for", "= self.client.post('/admin/themes/delete_subtheme', json={\"id\": self.subtheme.id}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def test_delete_subtheme_by_theme_id_and_name(self): \"\"\"", "TestSubTemes(TestCase): \"\"\" Unittest for the creation, renaming and deleting of", "Admin user for tests :return: an admin user for tests", "response = self.client.post('/admin/themes/delete_subtheme', json={\"id\": self.subtheme.id}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def test_delete_subtheme_by_theme_id_and_name(self):", "Authorization header for test :return: An authorization header \"\"\" response_login", "def test_delete_subtheme_by_theme_id_and_name(self): \"\"\" Delete a SubTheme by theme_id and name:", "self.create_admin_user() self.auth_header = self.get_auth_header() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") if not self.theme:", "-> {str: str}: \"\"\" Create an Authorization header for test", "name has been changed \"\"\" if not self.subtheme: self.subtheme =", "Check response data for the expected message 'Subtheme 
renamed' and", "= SubTheme.get_by_name(\"_TEST_SUB_THEME_2\") if test_sub: test_sub.delete() test_sub.commit() if self.theme: self.theme.delete() self.theme.commit()", "dummy theme \"\"\" self.client, self.app_context = self.create_test_client() self.user = self.create_admin_user()", "= test_app.app_context() test_app_context.push() return testing_client, test_app_context def create_dummy_subtheme(self) -> SubTheme:", "clients response status code for http status 404 (OK) \"\"\"", "for tests :return: SubTheme for tests \"\"\" subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')", "cleanup after tests\"\"\" self.subtheme = SubTheme.get_by_name(\"new_name_not_1\") if not self.subtheme: self.subtheme", "HTTPStatus.NO_CONTENT) def test_delete_subtheme_by_theme_id_and_name(self): \"\"\" Delete a SubTheme by theme_id and", "= self.client.post('/admin/themes/delete_subtheme', json={\"theme_id\": self.subtheme.t_id, \"name\": self.subtheme.name}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def", "a new SubTheme and check the client response status code", "bcrypt.gensalt()) user = Users.find_by_email(\"<EMAIL>\") if not user: user = Users(\"Admin\",", "\"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_non_existant_subtheme(self): \"\"\" Rename", "= Users.find_by_email(\"<EMAIL>\") if not user: user = Users(\"Admin\", \"<EMAIL>\", password_hash.decode(\"utf8\"),", "import FlaskClient from app import create_app from models.theme import Theme,", "status code for http status 200 (OK) Check JSON response", "self.subtheme = self.create_dummy_subtheme() def create_test_client(self) -> (FlaskClient, AppContext): \"\"\" Create", "flask testing client :return: FlaskClient for tests and AppContext \"\"\"", "message 'Subtheme renamed' and the Subtheme name has been changed", "SubTheme that does not exist and check the client response", "404 
\"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response =", "for the creation, renaming and deleting of Themes \"\"\" def", "response = response.get_json() self.assertEqual(response[\"id\"], self.subtheme.id) self.assertEqual(response[\"message\"], \"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name)", "SubTheme and check the client response status code for http", "json={\"theme_id\": self.subtheme.t_id, \"name\": self.subtheme.name}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def tearDown(self): \"\"\"", "bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"), bcrypt.gensalt()) user = Users.find_by_email(\"<EMAIL>\") if not user: user =", "not self.subtheme: self.subtheme = SubTheme.get_by_name(\"_TEST_SUB_THEME_\") if self.subtheme: self.subtheme.delete() self.subtheme.commit() test_sub", "user.save() user.commit() except Exception as e: pass return user def", "Delete a SubTheme by theme_id and name: check the client", "(OK) \"\"\" response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": -1, \"current_name\": \"a3d4f5g6h7j8k0\", \"new_name\":", "self.client.post('/admin/themes/delete_subtheme', json={\"name\": \"weA_gfj24fhurtyui\", \"theme_id\": -1}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_subtheme_by_id(self):", "http status 404 \"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme()", "self.theme.delete() self.theme.commit() self.client.post('/logout', headers=self.auth_header) if self.user: self.user.delete() self.user.commit() self.app_context.pop() if", "and check the client response status code for http status", "import bcrypt from flask.ctx import AppContext from flask.testing import FlaskClient", "self.client.post('/login', data=dict(email=self.user.email, password=\"<PASSWORD>\", remember=True), follow_redirects=True) response_login_json = 
response_login.get_json() return {'Authorization':", "Subtheme name has been changed \"\"\" if not self.subtheme: self.subtheme", "Check JSON response data for the expected message 'New theme", "for tests \"\"\" subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') if not subtheme: subtheme", "SubTheme(self.theme.id, '_TEST_SUB_THEME_') subtheme.save() subtheme.commit() subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') return subtheme def", "pass return user def get_auth_header(self) -> {str: str}: \"\"\" Create", "\"weA_gfj24fhurtyui\", \"theme_id\": -1}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_subtheme_by_id(self): \"\"\" Delete", "\"\"\" Create an Authorization header for test :return: An authorization", "Setup a FlaskClient for testing, creates an admin user and", "def test_rename_non_existant_subtheme(self): \"\"\" Rename a SubTheme that does not exist", "not user: user = Users(\"Admin\", \"<EMAIL>\", password_hash.decode(\"utf8\"), True, True) try:", "= self.get_auth_header() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") if not self.theme: self.theme =", "test_app_context def create_dummy_subtheme(self) -> SubTheme: \"\"\" Create SubTheme for tests", "True, True) try: user.save() user.commit() except Exception as e: pass", "from app import create_app from models.theme import Theme, SubTheme from", "self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"theme_id\": self.subtheme.t_id, \"name\": self.subtheme.name},", "Flask Client and a dummy theme \"\"\" self.client, self.app_context =", "self.user = self.create_admin_user() self.auth_header = self.get_auth_header() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") if", "that does not exist and check the clients response status", "SubTheme from models.users import Users class TestSubTemes(TestCase): \"\"\" Unittest for", "= self.client.post('/admin/themes/rename_subtheme', 
json={\"id\": self.subtheme.id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header)", "from flask.testing import FlaskClient from app import create_app from models.theme", "theme_id and check the clients response status code for http", "user for tests \"\"\" password_hash = bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"), bcrypt.gensalt()) user =", "= self.create_admin_user() self.auth_header = self.get_auth_header() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") if not", "authorization header \"\"\" response_login = self.client.post('/login', data=dict(email=self.user.email, password=\"<PASSWORD>\", remember=True), follow_redirects=True)", "Users class TestSubTemes(TestCase): \"\"\" Unittest for the creation, renaming and", "header for requests to the Flask Client and a dummy", "an admin user for tests \"\"\" password_hash = bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"), bcrypt.gensalt())", "test_app.test_client() test_app_context = test_app.app_context() test_app_context.push() return testing_client, test_app_context def create_dummy_subtheme(self)", "does not exist and check the clients response status code", "= response.get_json() self.assertEqual(response[\"id\"], self.subtheme.id) self.assertEqual(response[\"message\"], \"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"],", "the creation, renaming and deleting of Themes \"\"\" def setUp(self):", "SubTheme for tests :return: SubTheme for tests \"\"\" subtheme =", "and creates the authorization header for requests to the Flask", "response = self.client.post('/admin/themes/rename_subtheme', json={\"id\": self.subtheme.id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" },", "for http status 404 \"\"\" if not self.subtheme: self.subtheme =", "\"<EMAIL>\", password_hash.decode(\"utf8\"), True, True) try: user.save() user.commit() except Exception as", "def test_add_subtheme(self): 
\"\"\" Create a new SubTheme and check the", "if self.theme: self.theme.delete() self.theme.commit() self.client.post('/logout', headers=self.auth_header) if self.user: self.user.delete() self.user.commit()", "created\") self.assertEqual(json_response[\"theme_id\"], self.theme.id) self.assertEqual(json_response[\"subtheme\"], \"_TEST_SUB_THEME_2\") def test_rename_subtheme_theme_id(self): \"\"\" Rename a", "for testing, creates an admin user and creates the authorization", "= self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": -1, \"current_name\": \"a3d4f5g6h7j8k0\", \"new_name\": \"new_name_not_1\" }, headers=self.auth_header)", "if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"id\":", "response.get_json() self.assertEqual(response[\"id\"], self.subtheme.id) self.assertEqual(response[\"message\"], \"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\")", "current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) response = response.get_json()", "= self.client.post('/login', data=dict(email=self.user.email, password=\"<PASSWORD>\", remember=True), follow_redirects=True) response_login_json = response_login.get_json() return", "unittest from http import HTTPStatus from unittest import TestCase import", "Create SubTheme for tests :return: SubTheme for tests \"\"\" subtheme", "Users: \"\"\" Create Admin user for tests :return: an admin", "response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": -1, \"current_name\": \"a3d4f5g6h7j8k0\", \"new_name\": \"new_name_not_1\" },", "\"\"\" Handle the cleanup after tests\"\"\" self.subtheme = SubTheme.get_by_name(\"new_name_not_1\") if", "self.client.post('/logout', headers=self.auth_header) if self.user: self.user.delete() self.user.commit() 
self.app_context.pop() if __name__ ==", "'_TEST_SUB_THEME_') subtheme.save() subtheme.commit() subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') return subtheme def create_admin_user(self)", "the Subtheme name has been changed \"\"\" if not self.subtheme:", "{'Authorization': 'Bearer {}'.format(response_login_json[\"access_token\"])} def test_add_subtheme(self): \"\"\" Create a new SubTheme", "an admin user and creates the authorization header for requests", "flask.testing import FlaskClient from app import create_app from models.theme import", "data=dict(email=self.user.email, password=\"<PASSWORD>\", remember=True), follow_redirects=True) response_login_json = response_login.get_json() return {'Authorization': 'Bearer", "= self.create_dummy_subtheme() current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": self.subtheme.t_id,", "response data for the expected message 'New theme created' and", "self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_subtheme_id(self): \"\"\" Rename a SubTheme by id", "and AppContext \"\"\" test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True) testing_client = test_app.test_client()", "= SubTheme.get_by_name(\"_TEST_SUB_THEME_\") if self.subtheme: self.subtheme.delete() self.subtheme.commit() test_sub = SubTheme.get_by_name(\"_TEST_SUB_THEME_2\") if", "\"\"\" subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') if not subtheme: subtheme = SubTheme(self.theme.id,", "Users(\"Admin\", \"<EMAIL>\", password_hash.decode(\"utf8\"), True, True) try: user.save() user.commit() except Exception", "self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": -1, \"current_name\": \"a3d4f5g6h7j8k0\", \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code,", "self.assertEqual(response[\"message\"], \"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name) 
self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_non_existant_subtheme(self): \"\"\"", "= self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"name\": \"weA_gfj24fhurtyui\", \"theme_id\": -1}, headers=self.auth_header)", "self.subtheme.t_id, \"name\": self.subtheme.name}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def tearDown(self): \"\"\" Handle", "and check the clients response status code for http status", "\"new_name_not_1\") def test_rename_non_existant_subtheme(self): \"\"\" Rename a SubTheme that does not", "created' and Theme name \"\"\" response = self.client.post('/admin/themes/add_subtheme', json={\"theme_id\": self.theme.id,", "tests :return: SubTheme for tests \"\"\" subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') if", "response status code for http status 200 (OK) Check response", "create_admin_user(self) -> Users: \"\"\" Create Admin user for tests :return:", "\"_TEST_SUB_THEME_2\"}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) json_response = response.get_json() self.assertEqual(json_response[\"message\"], \"sub theme", "changed \"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme() current_name =", "user for tests :return: an admin user for tests \"\"\"", "Theme(\"_test_add_Subtheme_\") self.theme.save() self.theme.commit() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") self.subtheme = self.create_dummy_subtheme() def", "tests :return: an admin user for tests \"\"\" password_hash =", "self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"id\": self.subtheme.id}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def", "password_hash.decode(\"utf8\"), True, True) try: user.save() user.commit() except Exception as e:", "SubTheme.get_by_name(\"_TEST_SUB_THEME_\") if self.subtheme: 
self.subtheme.delete() self.subtheme.commit() test_sub = SubTheme.get_by_name(\"_TEST_SUB_THEME_2\") if test_sub:", "self.create_test_client() self.user = self.create_admin_user() self.auth_header = self.get_auth_header() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\")", "admin user and creates the authorization header for requests to", "= self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"theme_id\": self.subtheme.t_id, \"name\": self.subtheme.name}, headers=self.auth_header)", "and the Subtheme name has been changed \"\"\" if not", "self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") if not self.theme: self.theme = Theme(\"_test_add_Subtheme_\") self.theme.save()", "current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": self.subtheme.t_id, \"current_name\": current_name,", "header \"\"\" response_login = self.client.post('/login', data=dict(email=self.user.email, password=\"<PASSWORD>\", remember=True), follow_redirects=True) response_login_json", "self.subtheme: self.subtheme = self.create_dummy_subtheme() current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme',", "self.subtheme = SubTheme.get_by_name(\"new_name_not_1\") if not self.subtheme: self.subtheme = SubTheme.get_by_name(\"_TEST_SUB_THEME_\") if", "self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": self.subtheme.t_id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code,", "import Users class TestSubTemes(TestCase): \"\"\" Unittest for the creation, renaming", "= SubTheme.get_by_name('_TEST_SUB_THEME_') return subtheme def create_admin_user(self) -> Users: \"\"\" Create", "response status code for http status 204 (NO_CONTENT) \"\"\" if", "the client response status code for http status 200 (OK)", "client response status code for http status 404 \"\"\" 
if", "response_login = self.client.post('/login', data=dict(email=self.user.email, password=\"<PASSWORD>\", remember=True), follow_redirects=True) response_login_json = response_login.get_json()", "test_delete_subtheme_by_theme_id_and_name(self): \"\"\" Delete a SubTheme by theme_id and name: check", "try: user.save() user.commit() except Exception as e: pass return user", "Rename a SubTheme by id and check the clients response", "test_delete_subtheme_by_id(self): \"\"\" Delete a SubTheme by id and check the", "tests \"\"\" password_hash = bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"), bcrypt.gensalt()) user = Users.find_by_email(\"<EMAIL>\") if", "self.theme.id, \"subtheme\": \"_TEST_SUB_THEME_2\"}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) json_response = response.get_json() self.assertEqual(json_response[\"message\"],", "HTTPStatus.OK) json_response = response.get_json() self.assertEqual(json_response[\"message\"], \"sub theme created\") self.assertEqual(json_response[\"theme_id\"], self.theme.id)", "def create_dummy_subtheme(self) -> SubTheme: \"\"\" Create SubTheme for tests :return:", "current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_subtheme_id(self): \"\"\" Rename a SubTheme by", "\"sub theme created\") self.assertEqual(json_response[\"theme_id\"], self.theme.id) self.assertEqual(json_response[\"subtheme\"], \"_TEST_SUB_THEME_2\") def test_rename_subtheme_theme_id(self): \"\"\"", "models.users import Users class TestSubTemes(TestCase): \"\"\" Unittest for the creation,", "the clients response status code for http status 404 (OK)", "test_sub.commit() if self.theme: self.theme.delete() self.theme.commit() self.client.post('/logout', headers=self.auth_header) if self.user: self.user.delete()", "\"\"\" Create SubTheme for tests :return: SubTheme for tests \"\"\"", "self.theme.id) self.assertEqual(json_response[\"subtheme\"], \"_TEST_SUB_THEME_2\") def 
test_rename_subtheme_theme_id(self): \"\"\" Rename a SubTheme by", "as e: pass return user def get_auth_header(self) -> {str: str}:", "test_app.app_context() test_app_context.push() return testing_client, test_app_context def create_dummy_subtheme(self) -> SubTheme: \"\"\"", "test_rename_subtheme_id(self): \"\"\" Rename a SubTheme by id and check the", "for tests :return: an admin user for tests \"\"\" password_hash", "= Users(\"Admin\", \"<EMAIL>\", password_hash.decode(\"utf8\"), True, True) try: user.save() user.commit() except", "self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": self.subtheme.t_id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\"", "data for the expected message 'New theme created' and Theme", "self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"theme_id\": self.subtheme.t_id, \"name\":", "if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"name\":", "headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_subtheme_by_id(self): \"\"\" Delete a SubTheme by", "subtheme: subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_') subtheme.save() subtheme.commit() subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')", "Themes \"\"\" def setUp(self): \"\"\" Setup a FlaskClient for testing,", "for http status 200 (OK) Check JSON response data for", "test_app_context.push() return testing_client, test_app_context def create_dummy_subtheme(self) -> SubTheme: \"\"\" Create", "\"\"\" Rename a SubTheme by id and check the clients", "'Bearer {}'.format(response_login_json[\"access_token\"])} def test_add_subtheme(self): \"\"\" Create a new SubTheme and", "for http status 204 (NO_CONTENT) \"\"\" if not self.subtheme: self.subtheme", "= self.client.post('/admin/themes/rename_subtheme', 
json={\"theme_id\": self.subtheme.t_id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header)", "expected message 'New theme created' and Theme name \"\"\" response", "flask.ctx import AppContext from flask.testing import FlaskClient from app import", "creates an admin user and creates the authorization header for", "name \"\"\" response = self.client.post('/admin/themes/add_subtheme', json={\"theme_id\": self.theme.id, \"subtheme\": \"_TEST_SUB_THEME_2\"}, headers=self.auth_header)", "headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) response = response.get_json() self.assertEqual(response[\"id\"], self.subtheme.id) self.assertEqual(response[\"message\"], \"Subtheme", "SubTheme.get_by_name(\"_TEST_SUB_THEME_2\") if test_sub: test_sub.delete() test_sub.commit() if self.theme: self.theme.delete() self.theme.commit() self.client.post('/logout',", "Theme.get_by_name(\"_test_add_Subtheme_\") self.subtheme = self.create_dummy_subtheme() def create_test_client(self) -> (FlaskClient, AppContext): \"\"\"", "from unittest import TestCase import bcrypt from flask.ctx import AppContext", "\"a3d4f5g6h7j8k0\", \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_non_exsitant_subtheme(self): \"\"\"", "the expected message 'New theme created' and Theme name \"\"\"", "SubTheme: \"\"\" Create SubTheme for tests :return: SubTheme for tests", "import Theme, SubTheme from models.users import Users class TestSubTemes(TestCase): \"\"\"", "renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_subtheme_id(self): \"\"\" Rename a", "(OK) Check JSON response data for the expected message 'New", "\"\"\" Create Admin user for tests :return: an admin user", "self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def tearDown(self): \"\"\" Handle 
the cleanup after tests\"\"\"", "the authorization header for requests to the Flask Client and", "\"\"\" Delete a SubTheme that does not exist and check", "self.assertEqual(response.status_code, HTTPStatus.OK) json_response = response.get_json() self.assertEqual(json_response[\"message\"], \"sub theme created\") self.assertEqual(json_response[\"theme_id\"],", "self.subtheme.id) self.assertEqual(response[\"message\"], \"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_non_existant_subtheme(self):", "= test_app.test_client() test_app_context = test_app.app_context() test_app_context.push() return testing_client, test_app_context def", "self.assertEqual(json_response[\"theme_id\"], self.theme.id) self.assertEqual(json_response[\"subtheme\"], \"_TEST_SUB_THEME_2\") def test_rename_subtheme_theme_id(self): \"\"\" Rename a SubTheme", "create_app from models.theme import Theme, SubTheme from models.users import Users", "import HTTPStatus from unittest import TestCase import bcrypt from flask.ctx", "a SubTheme by id and check the client response status", "json={\"name\": \"weA_gfj24fhurtyui\", \"theme_id\": -1}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_subtheme_by_id(self): \"\"\"", "requests to the Flask Client and a dummy theme \"\"\"", "create_test_client(self) -> (FlaskClient, AppContext): \"\"\" Create flask testing client :return:", "return testing_client, test_app_context def create_dummy_subtheme(self) -> SubTheme: \"\"\" Create SubTheme", "the expected message 'Subtheme renamed' and the Subtheme name has", "\"\"\" Delete a SubTheme by theme_id and name: check the", "return {'Authorization': 'Bearer {}'.format(response_login_json[\"access_token\"])} def test_add_subtheme(self): \"\"\" Create a new", "json_response = response.get_json() self.assertEqual(json_response[\"message\"], \"sub theme created\") 
self.assertEqual(json_response[\"theme_id\"], self.theme.id) self.assertEqual(json_response[\"subtheme\"],", "not self.theme: self.theme = Theme(\"_test_add_Subtheme_\") self.theme.save() self.theme.commit() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\")", "password=\"<PASSWORD>\", remember=True), follow_redirects=True) response_login_json = response_login.get_json() return {'Authorization': 'Bearer {}'.format(response_login_json[\"access_token\"])}", "HTTPStatus.OK) response = response.get_json() self.assertEqual(response[\"id\"], self.subtheme.id) self.assertEqual(response[\"message\"], \"Subtheme renamed\") self.assertEqual(response[\"old_name\"],", "\"\"\" test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True) testing_client = test_app.test_client() test_app_context =", "not subtheme: subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_') subtheme.save() subtheme.commit() subtheme =", "test_sub.delete() test_sub.commit() if self.theme: self.theme.delete() self.theme.commit() self.client.post('/logout', headers=self.auth_header) if self.user:", "not self.subtheme: self.subtheme = self.create_dummy_subtheme() current_name = self.subtheme.name response =", "an Authorization header for test :return: An authorization header \"\"\"", "return subtheme def create_admin_user(self) -> Users: \"\"\" Create Admin user", "\"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) response = response.get_json() self.assertEqual(response[\"id\"],", "self.subtheme.id) self.assertEqual(response[\"message\"], \"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_subtheme_id(self):", "HTTPStatus.NOT_FOUND) def test_delete_non_exsitant_subtheme(self): \"\"\" Delete a SubTheme that does not", "{str: str}: \"\"\" Create an Authorization header for test :return:", "self.get_auth_header() self.theme = 
Theme.get_by_name(\"_test_add_Subtheme_\") if not self.theme: self.theme = Theme(\"_test_add_Subtheme_\")", "of Themes \"\"\" def setUp(self): \"\"\" Setup a FlaskClient for", "test_rename_subtheme_theme_id(self): \"\"\" Rename a SubTheme by theme_id and check the", "header for test :return: An authorization header \"\"\" response_login =", "remember=True), follow_redirects=True) response_login_json = response_login.get_json() return {'Authorization': 'Bearer {}'.format(response_login_json[\"access_token\"])} def", "self.client.post('/admin/themes/delete_subtheme', json={\"theme_id\": self.subtheme.t_id, \"name\": self.subtheme.name}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def tearDown(self):", "test_app_context = test_app.app_context() test_app_context.push() return testing_client, test_app_context def create_dummy_subtheme(self) ->", "Create a new SubTheme and check the client response status", "from models.users import Users class TestSubTemes(TestCase): \"\"\" Unittest for the", "AppContext): \"\"\" Create flask testing client :return: FlaskClient for tests", "Rename a SubTheme by theme_id and check the clients response", "test_sub = SubTheme.get_by_name(\"_TEST_SUB_THEME_2\") if test_sub: test_sub.delete() test_sub.commit() if self.theme: self.theme.delete()", "-1, \"current_name\": \"a3d4f5g6h7j8k0\", \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def", "a SubTheme by theme_id and check the clients response status", "self.client.post('/admin/themes/delete_subtheme', json={\"id\": self.subtheme.id}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def test_delete_subtheme_by_theme_id_and_name(self): \"\"\" Delete", "FlaskClient from app import create_app from models.theme import Theme, SubTheme", "import AppContext from flask.testing import FlaskClient from app import create_app", "tearDown(self): 
\"\"\" Handle the cleanup after tests\"\"\" self.subtheme = SubTheme.get_by_name(\"new_name_not_1\")", "tests\"\"\" self.subtheme = SubTheme.get_by_name(\"new_name_not_1\") if not self.subtheme: self.subtheme = SubTheme.get_by_name(\"_TEST_SUB_THEME_\")", "(OK) Check response data for the expected message 'Subtheme renamed'", "= self.create_test_client() self.user = self.create_admin_user() self.auth_header = self.get_auth_header() self.theme =", "renaming and deleting of Themes \"\"\" def setUp(self): \"\"\" Setup", "self.create_dummy_subtheme() current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"id\": self.subtheme.id, \"current_name\":", "self.client.post('/admin/themes/add_subtheme', json={\"theme_id\": self.theme.id, \"subtheme\": \"_TEST_SUB_THEME_2\"}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) json_response =", "\"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_non_exsitant_subtheme(self): \"\"\" Delete", "TestCase import bcrypt from flask.ctx import AppContext from flask.testing import", "that does not exist and check the client response status", "from http import HTTPStatus from unittest import TestCase import bcrypt", "self.subtheme: self.subtheme.delete() self.subtheme.commit() test_sub = SubTheme.get_by_name(\"_TEST_SUB_THEME_2\") if test_sub: test_sub.delete() test_sub.commit()", "the cleanup after tests\"\"\" self.subtheme = SubTheme.get_by_name(\"new_name_not_1\") if not self.subtheme:", "http status 200 (OK) Check response data for the expected", "SubTheme.get_by_name('_TEST_SUB_THEME_') return subtheme def create_admin_user(self) -> Users: \"\"\" Create Admin", "JSON response data for the expected message 'New theme created'", "current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"id\": self.subtheme.id, \"current_name\": 
current_name,", "response = self.client.post('/admin/themes/delete_subtheme', json={\"name\": \"weA_gfj24fhurtyui\", \"theme_id\": -1}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)", "headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) json_response = response.get_json() self.assertEqual(json_response[\"message\"], \"sub theme created\")", "id and check the client response status code for http", "test_delete_non_exsitant_subtheme(self): \"\"\" Delete a SubTheme that does not exist and", "HTTPStatus.NO_CONTENT) def tearDown(self): \"\"\" Handle the cleanup after tests\"\"\" self.subtheme", "test_sub: test_sub.delete() test_sub.commit() if self.theme: self.theme.delete() self.theme.commit() self.client.post('/logout', headers=self.auth_header) if", "status 200 (OK) Check JSON response data for the expected", "code for http status 404 (OK) \"\"\" response = self.client.post('/admin/themes/rename_subtheme',", "status 404 \"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response", "if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"theme_id\":", "user: user = Users(\"Admin\", \"<EMAIL>\", password_hash.decode(\"utf8\"), True, True) try: user.save()", "\"\"\" Rename a SubTheme by theme_id and check the clients", "SubTheme by theme_id and check the clients response status code", "status 200 (OK) Check response data for the expected message", "subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') if not subtheme: subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_')", "response status code for http status 404 \"\"\" if not", "\"\"\" response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": -1, \"current_name\": \"a3d4f5g6h7j8k0\", \"new_name\": \"new_name_not_1\"", "been changed \"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme() current_name", "not 
self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"id\": self.subtheme.id},", "self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"theme_id\": self.subtheme.t_id, \"name\": self.subtheme.name}, headers=self.auth_header) self.assertEqual(response.status_code,", "if test_sub: test_sub.delete() test_sub.commit() if self.theme: self.theme.delete() self.theme.commit() self.client.post('/logout', headers=self.auth_header)", "self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"name\": \"weA_gfj24fhurtyui\", \"theme_id\": -1},", "(FlaskClient, AppContext): \"\"\" Create flask testing client :return: FlaskClient for", "json={\"theme_id\": self.theme.id, \"subtheme\": \"_TEST_SUB_THEME_2\"}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) json_response = response.get_json()", "client response status code for http status 200 (OK) Check", "self.subtheme = self.create_dummy_subtheme() current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\":", "status 404 (OK) \"\"\" response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": -1, \"current_name\":", "class TestSubTemes(TestCase): \"\"\" Unittest for the creation, renaming and deleting", "status code for http status 204 (NO_CONTENT) \"\"\" if not", "create_dummy_subtheme(self) -> SubTheme: \"\"\" Create SubTheme for tests :return: SubTheme", "self.assertEqual(response[\"message\"], \"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_subtheme_id(self): \"\"\"", "status code for http status 404 \"\"\" if not self.subtheme:", "self.assertEqual(json_response[\"message\"], \"sub theme created\") self.assertEqual(json_response[\"theme_id\"], 
self.theme.id) self.assertEqual(json_response[\"subtheme\"], \"_TEST_SUB_THEME_2\") def test_rename_subtheme_theme_id(self):", "self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"id\": self.subtheme.id}, headers=self.auth_header)", "a SubTheme that does not exist and check the client", "response = self.client.post('/admin/themes/add_subtheme', json={\"theme_id\": self.theme.id, \"subtheme\": \"_TEST_SUB_THEME_2\"}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK)", "\"current_name\": \"a3d4f5g6h7j8k0\", \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_non_exsitant_subtheme(self):", "def test_rename_subtheme_theme_id(self): \"\"\" Rename a SubTheme by theme_id and check", "status code for http status 200 (OK) Check response data", "= self.create_dummy_subtheme() def create_test_client(self) -> (FlaskClient, AppContext): \"\"\" Create flask", "json={\"id\": self.subtheme.id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK)", "not exist and check the clients response status code for", "self.subtheme.id}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def test_delete_subtheme_by_theme_id_and_name(self): \"\"\" Delete a SubTheme", "self.subtheme.commit() test_sub = SubTheme.get_by_name(\"_TEST_SUB_THEME_2\") if test_sub: test_sub.delete() test_sub.commit() if self.theme:", "response_login.get_json() return {'Authorization': 'Bearer {}'.format(response_login_json[\"access_token\"])} def test_add_subtheme(self): \"\"\" Create a", "by theme_id and name: check the client response status code", "200 (OK) Check response data for the expected message 'Subtheme", "http status 204 (NO_CONTENT) \"\"\" if not self.subtheme: self.subtheme =", "}, 
headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_non_exsitant_subtheme(self): \"\"\" Delete a SubTheme", "-1}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_subtheme_by_id(self): \"\"\" Delete a SubTheme", "self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_non_exsitant_subtheme(self): \"\"\" Delete a SubTheme that does", "import TestCase import bcrypt from flask.ctx import AppContext from flask.testing", "def test_delete_subtheme_by_id(self): \"\"\" Delete a SubTheme by id and check", "testing client :return: FlaskClient for tests and AppContext \"\"\" test_app", "message 'New theme created' and Theme name \"\"\" response =", "{}'.format(response_login_json[\"access_token\"])} def test_add_subtheme(self): \"\"\" Create a new SubTheme and check", "subtheme def create_admin_user(self) -> Users: \"\"\" Create Admin user for", "-> SubTheme: \"\"\" Create SubTheme for tests :return: SubTheme for", "for tests and AppContext \"\"\" test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True) testing_client", "= self.client.post('/admin/themes/add_subtheme', json={\"theme_id\": self.theme.id, \"subtheme\": \"_TEST_SUB_THEME_2\"}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) json_response", "headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_non_exsitant_subtheme(self): \"\"\" Delete a SubTheme that", "from models.theme import Theme, SubTheme from models.users import Users class", "password_hash = bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"), bcrypt.gensalt()) user = Users.find_by_email(\"<EMAIL>\") if not user:", "str}: \"\"\" Create an Authorization header for test :return: An", "http import HTTPStatus from unittest import TestCase import bcrypt from", "headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def 
test_delete_subtheme_by_theme_id_and_name(self): \"\"\" Delete a SubTheme by", "\"\"\" Unittest for the creation, renaming and deleting of Themes", "for test :return: An authorization header \"\"\" response_login = self.client.post('/login',", "if not user: user = Users(\"Admin\", \"<EMAIL>\", password_hash.decode(\"utf8\"), True, True)", "exist and check the clients response status code for http", "code for http status 200 (OK) Check response data for", "if not self.theme: self.theme = Theme(\"_test_add_Subtheme_\") self.theme.save() self.theme.commit() self.theme =", "self.assertEqual(response[\"id\"], self.subtheme.id) self.assertEqual(response[\"message\"], \"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def", "\"\"\" password_hash = bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"), bcrypt.gensalt()) user = Users.find_by_email(\"<EMAIL>\") if not", "= response_login.get_json() return {'Authorization': 'Bearer {}'.format(response_login_json[\"access_token\"])} def test_add_subtheme(self): \"\"\" Create", "code for http status 204 (NO_CONTENT) \"\"\" if not self.subtheme:", "= Theme.get_by_name(\"_test_add_Subtheme_\") self.subtheme = self.create_dummy_subtheme() def create_test_client(self) -> (FlaskClient, AppContext):", "self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") self.subtheme = self.create_dummy_subtheme() def create_test_client(self) -> (FlaskClient,", "\"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) response = response.get_json() self.assertEqual(response[\"id\"], self.subtheme.id)", "self.subtheme: self.subtheme = SubTheme.get_by_name(\"_TEST_SUB_THEME_\") if self.subtheme: self.subtheme.delete() self.subtheme.commit() test_sub =", "AppContext from flask.testing import FlaskClient from app import create_app from", "self.subtheme.name}, headers=self.auth_header) self.assertEqual(response.status_code, 
HTTPStatus.NO_CONTENT) def tearDown(self): \"\"\" Handle the cleanup", "and deleting of Themes \"\"\" def setUp(self): \"\"\" Setup a", "if self.subtheme: self.subtheme.delete() self.subtheme.commit() test_sub = SubTheme.get_by_name(\"_TEST_SUB_THEME_2\") if test_sub: test_sub.delete()", "Theme name \"\"\" response = self.client.post('/admin/themes/add_subtheme', json={\"theme_id\": self.theme.id, \"subtheme\": \"_TEST_SUB_THEME_2\"},", "if self.user: self.user.delete() self.user.commit() self.app_context.pop() if __name__ == '__main__': unittest.main()", "Handle the cleanup after tests\"\"\" self.subtheme = SubTheme.get_by_name(\"new_name_not_1\") if not", "self.create_dummy_subtheme() def create_test_client(self) -> (FlaskClient, AppContext): \"\"\" Create flask testing", "client response status code for http status 204 (NO_CONTENT) \"\"\"", "Create flask testing client :return: FlaskClient for tests and AppContext", "def test_rename_subtheme_id(self): \"\"\" Rename a SubTheme by id and check", "self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"name\": \"weA_gfj24fhurtyui\", \"theme_id\": -1}, headers=self.auth_header) self.assertEqual(response.status_code,", "\"theme_id\": -1}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_subtheme_by_id(self): \"\"\" Delete a", "self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"id\": self.subtheme.id}, headers=self.auth_header) self.assertEqual(response.status_code,", "for tests \"\"\" password_hash = bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"), bcrypt.gensalt()) user = Users.find_by_email(\"<EMAIL>\")", "self.theme = Theme(\"_test_add_Subtheme_\") self.theme.save() self.theme.commit() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") self.subtheme =", "id and check the clients response status code for http", "test_add_subtheme(self): \"\"\" Create a 
new SubTheme and check the client", "Theme, SubTheme from models.users import Users class TestSubTemes(TestCase): \"\"\" Unittest", "get_auth_header(self) -> {str: str}: \"\"\" Create an Authorization header for", "not self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"name\": \"weA_gfj24fhurtyui\",", "(NO_CONTENT) \"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response =", "renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_non_existant_subtheme(self): \"\"\" Rename a", "the client response status code for http status 404 \"\"\"", "\"\"\" Rename a SubTheme that does not exist and check", "Create an Authorization header for test :return: An authorization header", "the client response status code for http status 204 (NO_CONTENT)", "bcrypt from flask.ctx import AppContext from flask.testing import FlaskClient from", "204 (NO_CONTENT) \"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response", "json={\"id\": self.subtheme.id}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def test_delete_subtheme_by_theme_id_and_name(self): \"\"\" Delete a", "testing_client = test_app.test_client() test_app_context = test_app.app_context() test_app_context.push() return testing_client, test_app_context", "-> (FlaskClient, AppContext): \"\"\" Create flask testing client :return: FlaskClient", "code for http status 200 (OK) Check JSON response data", "response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": self.subtheme.t_id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" },", "\"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme',", "response = self.client.post('/admin/themes/delete_subtheme', 
json={\"theme_id\": self.subtheme.t_id, \"name\": self.subtheme.name}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)", "\"\"\" Create flask testing client :return: FlaskClient for tests and", "theme created\") self.assertEqual(json_response[\"theme_id\"], self.theme.id) self.assertEqual(json_response[\"subtheme\"], \"_TEST_SUB_THEME_2\") def test_rename_subtheme_theme_id(self): \"\"\" Rename", "\"\"\" Setup a FlaskClient for testing, creates an admin user", "user def get_auth_header(self) -> {str: str}: \"\"\" Create an Authorization", "status code for http status 404 (OK) \"\"\" response =", "Create Admin user for tests :return: an admin user for", "subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_') subtheme.save() subtheme.commit() subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') return", "FlaskClient for tests and AppContext \"\"\" test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True)", "self.subtheme = SubTheme.get_by_name(\"_TEST_SUB_THEME_\") if self.subtheme: self.subtheme.delete() self.subtheme.commit() test_sub = SubTheme.get_by_name(\"_TEST_SUB_THEME_2\")", "= self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"id\": self.subtheme.id, \"current_name\": current_name, \"new_name\":", "\"subtheme\": \"_TEST_SUB_THEME_2\"}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) json_response = response.get_json() self.assertEqual(json_response[\"message\"], \"sub", "the clients response status code for http status 200 (OK)", "\"\"\" Create a new SubTheme and check the client response", "app import create_app from models.theme import Theme, SubTheme from models.users", "test_rename_non_existant_subtheme(self): \"\"\" Rename a SubTheme that does not exist and", "import unittest from http import HTTPStatus from unittest import TestCase", "if not self.subtheme: self.subtheme = SubTheme.get_by_name(\"_TEST_SUB_THEME_\") if self.subtheme: 
self.subtheme.delete() self.subtheme.commit()", "def get_auth_header(self) -> {str: str}: \"\"\" Create an Authorization header", "code for http status 404 \"\"\" if not self.subtheme: self.subtheme", "headers=self.auth_header) if self.user: self.user.delete() self.user.commit() self.app_context.pop() if __name__ == '__main__':", ":return: an admin user for tests \"\"\" password_hash = bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"),", "self.client.post('/admin/themes/rename_subtheme', json={\"id\": self.subtheme.id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code,", "self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"name\": \"weA_gfj24fhurtyui\", \"theme_id\":", "self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"id\": self.subtheme.id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\"", "new SubTheme and check the client response status code for", "creation, renaming and deleting of Themes \"\"\" def setUp(self): \"\"\"", "HTTPStatus from unittest import TestCase import bcrypt from flask.ctx import", ":return: An authorization header \"\"\" response_login = self.client.post('/login', data=dict(email=self.user.email, password=\"<PASSWORD>\",", "response status code for http status 404 (OK) \"\"\" response", "authorization header for requests to the Flask Client and a", "Theme.get_by_name(\"_test_add_Subtheme_\") if not self.theme: self.theme = Theme(\"_test_add_Subtheme_\") self.theme.save() self.theme.commit() self.theme", "testing_client, test_app_context def create_dummy_subtheme(self) -> SubTheme: \"\"\" Create SubTheme for", "models.theme import Theme, SubTheme from models.users import Users class TestSubTemes(TestCase):", "self.theme.save() self.theme.commit() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") self.subtheme = 
self.create_dummy_subtheme() def create_test_client(self)", "for requests to the Flask Client and a dummy theme", "\"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme() current_name = self.subtheme.name", "SubTheme by id and check the client response status code", "self.assertEqual(response.status_code, HTTPStatus.OK) response = response.get_json() self.assertEqual(response[\"id\"], self.subtheme.id) self.assertEqual(response[\"message\"], \"Subtheme renamed\")", "self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_subtheme_by_id(self): \"\"\" Delete a SubTheme by id", "to the Flask Client and a dummy theme \"\"\" self.client,", "a SubTheme that does not exist and check the clients", "Unittest for the creation, renaming and deleting of Themes \"\"\"", "http status 404 (OK) \"\"\" response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": -1,", "if not self.subtheme: self.subtheme = self.create_dummy_subtheme() current_name = self.subtheme.name response", "response_login_json = response_login.get_json() return {'Authorization': 'Bearer {}'.format(response_login_json[\"access_token\"])} def test_add_subtheme(self): \"\"\"", "An authorization header \"\"\" response_login = self.client.post('/login', data=dict(email=self.user.email, password=\"<PASSWORD>\", remember=True),", "and a dummy theme \"\"\" self.client, self.app_context = self.create_test_client() self.user", "True) try: user.save() user.commit() except Exception as e: pass return", "200 (OK) Check JSON response data for the expected message", "and name: check the client response status code for http", "self.subtheme.t_id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) response", "SubTheme by theme_id and name: check the client response status", "subtheme.commit() subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') return subtheme def 
create_admin_user(self) -> Users:", "404 (OK) \"\"\" response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": -1, \"current_name\": \"a3d4f5g6h7j8k0\",", "self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_subtheme_id(self): \"\"\" Rename a SubTheme", "self.theme: self.theme = Theme(\"_test_add_Subtheme_\") self.theme.save() self.theme.commit() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") self.subtheme", "admin user for tests \"\"\" password_hash = bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"), bcrypt.gensalt()) user", "json={\"theme_id\": self.subtheme.t_id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK)", "def create_admin_user(self) -> Users: \"\"\" Create Admin user for tests", "self.client, self.app_context = self.create_test_client() self.user = self.create_admin_user() self.auth_header = self.get_auth_header()", "create_app(DATABASE_NAME='test_analysis', TESTING=True) testing_client = test_app.test_client() test_app_context = test_app.app_context() test_app_context.push() return", "def create_test_client(self) -> (FlaskClient, AppContext): \"\"\" Create flask testing client", "self.theme.commit() self.client.post('/logout', headers=self.auth_header) if self.user: self.user.delete() self.user.commit() self.app_context.pop() if __name__", "response.get_json() self.assertEqual(json_response[\"message\"], \"sub theme created\") self.assertEqual(json_response[\"theme_id\"], self.theme.id) self.assertEqual(json_response[\"subtheme\"], \"_TEST_SUB_THEME_2\") def", "= Theme(\"_test_add_Subtheme_\") self.theme.save() self.theme.commit() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") self.subtheme = self.create_dummy_subtheme()", "user and creates the authorization header for requests to the", "user.commit() except Exception as e: pass return 
user def get_auth_header(self)", "clients response status code for http status 200 (OK) Check", "not self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={\"theme_id\": self.subtheme.t_id,", "self.subtheme.delete() self.subtheme.commit() test_sub = SubTheme.get_by_name(\"_TEST_SUB_THEME_2\") if test_sub: test_sub.delete() test_sub.commit() if", "subtheme.save() subtheme.commit() subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') return subtheme def create_admin_user(self) ->", "a FlaskClient for testing, creates an admin user and creates", "\"Subtheme renamed\") self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_subtheme_id(self): \"\"\" Rename", "renamed' and the Subtheme name has been changed \"\"\" if", ":return: SubTheme for tests \"\"\" subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') if not", "a SubTheme by theme_id and name: check the client response", "self.auth_header = self.get_auth_header() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") if not self.theme: self.theme", "does not exist and check the client response status code", "testing, creates an admin user and creates the authorization header", "\"\"\" def setUp(self): \"\"\" Setup a FlaskClient for testing, creates", "AppContext \"\"\" test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True) testing_client = test_app.test_client() test_app_context", "FlaskClient for testing, creates an admin user and creates the", "client :return: FlaskClient for tests and AppContext \"\"\" test_app =", "-> Users: \"\"\" Create Admin user for tests :return: an", "self.subtheme.id, \"current_name\": current_name, \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) response", "theme_id and name: check the client response status code for", "\"\"\" response_login = 
self.client.post('/login', data=dict(email=self.user.email, password=\"<PASSWORD>\", remember=True), follow_redirects=True) response_login_json =", "status 204 (NO_CONTENT) \"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme()", "= self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": self.subtheme.t_id, \"current_name\": current_name, \"new_name\":", "setUp(self): \"\"\" Setup a FlaskClient for testing, creates an admin", "data for the expected message 'Subtheme renamed' and the Subtheme", "import create_app from models.theme import Theme, SubTheme from models.users import", "name: check the client response status code for http status", "\"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_non_exsitant_subtheme(self): \"\"\" Delete a", "http status 200 (OK) Check JSON response data for the", "return user def get_auth_header(self) -> {str: str}: \"\"\" Create an", "for http status 200 (OK) Check response data for the", "self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def test_delete_subtheme_by_theme_id_and_name(self): \"\"\" Delete a SubTheme by theme_id", "for the expected message 'Subtheme renamed' and the Subtheme name", "the Flask Client and a dummy theme \"\"\" self.client, self.app_context", "'New theme created' and Theme name \"\"\" response = self.client.post('/admin/themes/add_subtheme',", "\"\"\" Delete a SubTheme by id and check the client", "json={\"theme_id\": -1, \"current_name\": \"a3d4f5g6h7j8k0\", \"new_name\": \"new_name_not_1\" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)", "self.theme: self.theme.delete() self.theme.commit() self.client.post('/logout', headers=self.auth_header) if self.user: self.user.delete() self.user.commit() self.app_context.pop()", "Users.find_by_email(\"<EMAIL>\") if not user: user = Users(\"Admin\", \"<EMAIL>\", 
password_hash.decode(\"utf8\"), True,", "= SubTheme.get_by_name(\"new_name_not_1\") if not self.subtheme: self.subtheme = SubTheme.get_by_name(\"_TEST_SUB_THEME_\") if self.subtheme:", "self.assertEqual(json_response[\"subtheme\"], \"_TEST_SUB_THEME_2\") def test_rename_subtheme_theme_id(self): \"\"\" Rename a SubTheme by theme_id", "by id and check the clients response status code for", "subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') return subtheme def create_admin_user(self) -> Users: \"\"\"", "\"new_name_not_1\") def test_rename_subtheme_id(self): \"\"\" Rename a SubTheme by id and", "\"_TEST_SUB_THEME_2\") def test_rename_subtheme_theme_id(self): \"\"\" Rename a SubTheme by theme_id and", "SubTheme that does not exist and check the clients response", "check the client response status code for http status 204", "check the client response status code for http status 404", "user = Users.find_by_email(\"<EMAIL>\") if not user: user = Users(\"Admin\", \"<EMAIL>\",", "= self.create_dummy_subtheme() current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"id\": self.subtheme.id,", "= response.get_json() self.assertEqual(json_response[\"message\"], \"sub theme created\") self.assertEqual(json_response[\"theme_id\"], self.theme.id) self.assertEqual(json_response[\"subtheme\"], \"_TEST_SUB_THEME_2\")", "expected message 'Subtheme renamed' and the Subtheme name has been", "Delete a SubTheme by id and check the client response", "self.assertEqual(response[\"old_name\"], current_name) self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_non_existant_subtheme(self): \"\"\" Rename a SubTheme", "check the clients response status code for http status 200", "and Theme name \"\"\" response = self.client.post('/admin/themes/add_subtheme', json={\"theme_id\": self.theme.id, \"subtheme\":", "theme \"\"\" self.client, self.app_context = self.create_test_client() self.user = self.create_admin_user() 
self.auth_header", "Exception as e: pass return user def get_auth_header(self) -> {str:", "\"\"\" response = self.client.post('/admin/themes/add_subtheme', json={\"theme_id\": self.theme.id, \"subtheme\": \"_TEST_SUB_THEME_2\"}, headers=self.auth_header) self.assertEqual(response.status_code,", "Rename a SubTheme that does not exist and check the", "SubTheme.get_by_name('_TEST_SUB_THEME_') if not subtheme: subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_') subtheme.save() subtheme.commit()", "def setUp(self): \"\"\" Setup a FlaskClient for testing, creates an", "from flask.ctx import AppContext from flask.testing import FlaskClient from app", "after tests\"\"\" self.subtheme = SubTheme.get_by_name(\"new_name_not_1\") if not self.subtheme: self.subtheme =", "HTTPStatus.NOT_FOUND) def test_delete_subtheme_by_id(self): \"\"\" Delete a SubTheme by id and", "\"name\": self.subtheme.name}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def tearDown(self): \"\"\" Handle the", "TESTING=True) testing_client = test_app.test_client() test_app_context = test_app.app_context() test_app_context.push() return testing_client,", "for the expected message 'New theme created' and Theme name", "self.create_dummy_subtheme() current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\": self.subtheme.t_id, \"current_name\":", "for http status 404 (OK) \"\"\" response = self.client.post('/admin/themes/rename_subtheme', json={\"theme_id\":", "Delete a SubTheme that does not exist and check the", "theme created' and Theme name \"\"\" response = self.client.post('/admin/themes/add_subtheme', json={\"theme_id\":", "by theme_id and check the clients response status code for", "tests and AppContext \"\"\" test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True) testing_client =", "e: pass return user def get_auth_header(self) -> {str: str}: \"\"\"", "exist and check the client response 
status code for http", "follow_redirects=True) response_login_json = response_login.get_json() return {'Authorization': 'Bearer {}'.format(response_login_json[\"access_token\"])} def test_add_subtheme(self):", ":return: FlaskClient for tests and AppContext \"\"\" test_app = create_app(DATABASE_NAME='test_analysis',", "Client and a dummy theme \"\"\" self.client, self.app_context = self.create_test_client()", "SubTheme for tests \"\"\" subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') if not subtheme:", "self.assertEqual(response[\"new_name\"], \"new_name_not_1\") def test_rename_non_existant_subtheme(self): \"\"\" Rename a SubTheme that does", "self.app_context = self.create_test_client() self.user = self.create_admin_user() self.auth_header = self.get_auth_header() self.theme", "response status code for http status 200 (OK) Check JSON", "SubTheme.get_by_name(\"new_name_not_1\") if not self.subtheme: self.subtheme = SubTheme.get_by_name(\"_TEST_SUB_THEME_\") if self.subtheme: self.subtheme.delete()", "self.theme.commit() self.theme = Theme.get_by_name(\"_test_add_Subtheme_\") self.subtheme = self.create_dummy_subtheme() def create_test_client(self) ->", "self.subtheme = self.create_dummy_subtheme() current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={\"id\":", "= bcrypt.hashpw(\"<PASSWORD>\".encode(\"utf-8\"), bcrypt.gensalt()) user = Users.find_by_email(\"<EMAIL>\") if not user: user", "response data for the expected message 'Subtheme renamed' and the", "SubTheme by id and check the clients response status code", "check the clients response status code for http status 404", "def test_delete_non_exsitant_subtheme(self): \"\"\" Delete a SubTheme that does not exist", "def tearDown(self): \"\"\" Handle the cleanup after tests\"\"\" self.subtheme =", "has been changed \"\"\" if not self.subtheme: self.subtheme = self.create_dummy_subtheme()", "creates the authorization header for requests to the Flask Client", "}, 
headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) response = response.get_json() self.assertEqual(response[\"id\"], self.subtheme.id) self.assertEqual(response[\"message\"]," ]
[ "BIT[1] 0x00000002 <96> Speed aided with sensor data BIT[2] 0x00000004", "computed by WLS in seconds */ uint32 q_FltSftOffsetSigmaSec; /* Gaussian", "up components of user velocity */ uint32 q_FltClockBiasMeters; /* Receiver", "float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" gps_measurement_report_sv = \"\"\" uint8_t sv_id;", "LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1 LOG_GNSS_ME_DPO_STATUS = 0x1838 LOG_GNSS_CD_DB_REPORT", "north, up coordinate frame. In meters per second. */ uint32", "uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for BeiDou to", "*/ uint8 u_PosSource; /* Source of position information */ /*", "or '_Dbl' in nam: st += \"d\" elif typ in", "heading uncertainty in radians */ uint32 q_FltVelEnuMps[3]; /* User velocity", "positions reported */ uint64 t_DblFinalPosLatLon[2]; /* Final latitude and longitude", "u_NumBdsSvsUsed; /* The number of BeiDou SVs used in the", "ellipse values */ uint32 q_FltEllipseAngle; /* Angle of semimajor axis", "number of position */ uint32 q_GpsFixTimeMs; /* GPS fix time", "Gaussian 1-sigma value for clock drift in meters per second", "\"\"\" gps_measurement_report_sv = \"\"\" uint8_t sv_id; uint8_t observation_state; // SVObservationStates", "uint32 q_Reserved1; /* Reserved memory field */ uint16 w_PosVelFlag; /*", "st += \"i\" elif typ in [\"uint16\", \"uint16_t\"]: st +=", "east, north, up coordinate frame. In meters per second. */", "clockwise from North. In units of degrees. */ uint32 q_FltEllipseSemimajorAxis;", "type\", typ) assert False if '[' in nam: cnt =", "used to compute this position fix. 
BIT[0] 0x00000001 <96> Accelerometer", "i += 2 else: ret.append(nam[i]) i += 1 return ''.join(ret)", "nams = [] for l in ss.strip().split(\"\\n\"): typ, nam =", "0x1477 LOG_GNSS_CLOCK_REPORT = 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756", "least-squares 2: Kalman filter 3: Externally injected 4: Internal database", "LOG_GNSS_POSITION_REPORT = 0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477 LOG_GNSS_CLOCK_REPORT = 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT", "of precision as computed from the unweighted uint32 q_FltHdop; /*", "\"i\" elif typ in [\"uint16\", \"uint16_t\"]: st += \"H\" elif", "frame. In meters per second. */ uint32 q_FltVelSigmaMps[3]; /* Gaussian", "uncertainty error ellipse. In units of meters. */ uint32 q_FltPosSigmaVertical;", "*/ uint32 q_FltGBTBMeters; /* GPS to BeiDou time bias in", "float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; \"\"\" gps_measurement_report", "nam) if typ == \"float\" or '_Flt' in nam: st", "counter */ uint8 u_PosSource; /* Source of position information */", "int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count;", "Reserved */ uint8 u_NumGpsSvsUsed; /* The number of GPS SVs", "gps_measurement_report = \"\"\" uint8_t version; uint32_t f_count; uint16_t week; uint32_t", "q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of position */ uint32 q_FltHeadingRad;", "radians */ uint32 q_FltHeadingUncRad; /* User heading uncertainty in radians", "/* User heading in radians derived from GNSS only solution", "data were used to compute this position fix. 
BIT[0] 0x00000001", "0x1476 documentation) */ uint32 _fake_align_week_number; uint16 w_GpsWeekNumber; /* GPS week", "filter 3: Externally injected 4: Internal database */ uint32 q_Reserved1;", "frequency bias) in meters per second */ uint32 q_FltClockDriftSigmaMps; /*", "uint32 q_FltEllipseAngle; /* Angle of semimajor axis with respect to", "= [] i = 0 while i < len(nam): if", "GPS to BeiDou time bias uncertainty in meters */ uint32", "Kalman filter 3: Externally injected 4: Internal database */ uint32", "/* Denotes which component of the position report was assisted", "1-sigma value for final position height-above-ellipsoid altitude in meters */", "BeiDou time bias in meters */ uint32 q_FltFiltGBTBSigmaMeters; /* Filtered", "to 1 indicates that certain fields as defined by the", "meters */ uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for", "q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for GPS to BeiDou time", "Very Low 2: Low 3: Medium 4: High */ uint8", "Filtered Gaussian 1-sigma value for GPS to Glonass time bias", "q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for receiver clock bias in", "position dilution of precision as computed from the unweighted least-squares", "uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations; uint8_t hemming_error_count;", "velocity 2 bit field: (see DM log 0x1476 documentation) */", "u_Version; /* Version number of DM log */ uint32 q_Fcount;", "typ == \"float\" or '_Flt' in nam: st += \"f\"", "/* The number of GPS SVs used in the fix", "field */ uint32 q_FltGnssHeadingRad; /* User heading in radians derived", "uncertainty in meters */ uint32 q_FltBGTBMeters; /* BeiDou to Glonass", "GPS to Glonass time bias in meters */ uint32 q_FltGGTBSigmaMeters;", "nams.append(nam) return st, nams def dict_unpacker(ss, camelcase = False): st,", "SVs used in the fix */ uint8 u_TotalGloSvs; /* Total", "/* Filtered Gaussian 1-sigma value for GPS to BeiDou time", 
"\"uint64\": st += \"Q\" else: print(\"unknown type\", typ) assert False", "Final height-above-ellipsoid altitude of position */ uint32 q_FltHeadingRad; /* User", "= 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT =", "print(\"unknown type\", typ) assert False if '[' in nam: cnt", "= \"\"\" uint8 u_Version; /* Version number of DM log", "moving clockwise from North. In units of degrees. */ uint32", "in the fix */ uint8 u_TotalBdsSvs; /* Total number of", "= \"<\" nams = [] for l in ss.strip().split(\"\\n\"): typ,", "in ss.strip().split(\"\\n\"): typ, nam = l.split(\";\")[0].split() #print(typ, nam) if typ", "nam = l.split(\";\")[0].split() #print(typ, nam) if typ == \"float\" or", "% (nam.split(\"[\")[0], i)) else: nams.append(nam) return st, nams def dict_unpacker(ss,", "sensor data 0xFFFFFFF0 <96> Reserved */ uint8 u_NumGpsSvsUsed; /* The", "fix time of day in milliseconds */ uint32 q_PosCount; /*", "Reserved memory field */ uint32 q_FltGnssHeadingRad; /* User heading in", "axis with respect to true North, with increasing angles moving", "by the SENSOR_AIDING_MASK were aided with sensor data*/ uint32 q_SensorAidMask;", "in seconds */ uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for", "/* Gaussian 1-sigma value for filtered height-above-ellipsoid altitude in meters", "bias in meters */ uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value", "parity_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t", "u_NumGloSvsUsed; /* The number of Glonass SVs used in the", "The number of Glonass SVs used in the fix */", "st, nams def dict_unpacker(ss, camelcase = False): st, nams =", "2 bit field: (see DM log 0x1476 documentation) */ uint8", "w_PosVelFlag; /* Position velocity bit field: (see DM log 0x1476", "solution */ uint32 q_SensorDataUsageMask; /* Denotes which additional sensor data", "in meters */ uint32 
q_FltBGTBMeters; /* BeiDou to Glonass time", "of GPS SVs detected by searcher, including ones not used", "in meters */ uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass", "<96> Speed aided with sensor data BIT[2] 0x00000004 <96> Position", "to BeiDou time bias in meters */ uint32 q_FltGBTBSigmaMeters; /*", "u_VerticalReliability; /* Vertical position reliability */ uint16 w_Reserved2; /* Reserved", "the SENSOR_AIDING_MASK were aided with sensor data*/ uint32 q_SensorAidMask; /*", "q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for BeiDou to Glonass time", "float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; \"\"\" gps_measurement_report = \"\"\"", "uint8_t good_observations; uint16_t parity_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency;", "BeiDou SVs used in the fix */ uint8 u_TotalBdsSvs; /*", "field: (see DM log 0x1476 documentation) */ uint32 _fake_align_week_number; uint16", "Glonass time bias in meters */ uint32 q_FltFiltBGTBSigmaMeters; /* Filtered", "used in position calculation */ uint8 u_NumBdsSvsUsed; /* The number", "value for GPS to BeiDou time bias uncertainty in meters", "and longitude of position in radians */ uint32 q_FltFinalPosAlt; /*", "data BIT[1] 0x00000002 <96> Speed aided with sensor data BIT[2]", "w_GpsWeekNumber; /* GPS week number of position */ uint32 q_GpsFixTimeMs;", "/* Angle of semimajor axis with respect to true North,", "cycle */ uint32 q_GloFixTimeMs; /* Glonass fix time of day", "nam: st += \"f\" elif typ == \"double\" or '_Dbl'", "derived from GNSS only solution */ uint32 q_FltGnssHeadingUncRad; /* User", "(nam.split(\"[\")[0], i)) else: nams.append(nam) return st, nams def dict_unpacker(ss, camelcase", "additional sensor data were used to compute this position fix.", "/* Gaussian 1-sigma value for BeiDou to Glonass time bias", "elif typ == \"uint64\": st += \"Q\" else: print(\"unknown type\",", "nams.append(\"%s[%d]\" % (nam.split(\"[\")[0], i)) else: nams.append(nam) 
return st, nams def", "as computed from the unweighted least-squares covariance matrix */ uint8", "sensor data BIT[2] 0x00000004 <96> Position aided with sensor data", "by searcher, including ones not used in position calculation */", "Raw height-above-ellipsoid altitude in meters as computed by WLS */", "unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t misc_status; uint32_t multipath_estimate; float azimuth; float", "Gaussian 1-sigma value for filtered height-above-ellipsoid altitude in meters */", "uint32 q_FltGBTBMeters; /* GPS to BeiDou time bias in meters", "\"Q\" else: print(\"unknown type\", typ) assert False if '[' in", "value for SFT offset in seconds */ uint32 q_FltClockDriftMps; /*", "meters */ uint32 q_FltSftOffsetSec; /* SFT offset as computed by", "'[' in nam: cnt = int(nam.split(\"[\")[1].split(\"]\")[0]) st += st[-1]*(cnt-1) for", "in meters */ uint32 q_FltGGTBMeters; /* GPS to Glonass time", "for BeiDou to Glonass time bias uncertainty in meters */", "uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to", "uncertainty in meters */ uint32 q_FltGBTBMeters; /* GPS to BeiDou", "*/ uint8 u_HorizontalReliability; /* Horizontal position reliability 0: Not set", "BeiDou time bias in meters */ uint32 q_FltGBTBSigmaMeters; /* Gaussian", "*/ uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty in radians derived", "uint8_t version; uint32_t f_count; uint8_t glonass_cycle_number; uint16_t glonass_number_of_days; uint32_t milliseconds;", "st += \"d\" elif typ in [\"uint8\", \"uint8_t\"]: st +=", "typ in [\"int32\", \"int32_t\"]: st += \"i\" elif typ in", "uint16 w_PosVelFlag; /* Position velocity bit field: (see DM log", "camelcase: nams = [name_to_camelcase(x) for x in nams] sz =", "ret.append(nam[i+1].upper()) i += 2 else: ret.append(nam[i]) i += 1 return", "uint32 q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou time bias in", "frequency_index; uint8_t observation_state; // SVObservationStates uint8_t 
observations; uint8_t good_observations; uint8_t", "uint8 u_PosSource; /* Source of position information */ /* 0:", "+= \"Q\" else: print(\"unknown type\", typ) assert False if '['", "meters */ uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for GPS", "raw height-above-ellipsoid altitude in meters */ uint32 align_Flt[14]; uint32 q_FltPdop;", "Number of Glonass four year cycles */ uint16 w_GloNumDaysInFourYear; /*", "altitude in meters */ uint32 align_Flt[14]; uint32 q_FltPdop; /* 3D", "0x0000FFFC - Reserved A bit set to 1 indicates that", "Gaussian 1-sigma value for receiver clock bias in meters */", "i in range(cnt): nams.append(\"%s[%d]\" % (nam.split(\"[\")[0], i)) else: nams.append(nam) return", "The number of GPS SVs used in the fix */", "uint16_t glonass_number_of_days; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias;", "1-sigma value for GPS to BeiDou time bias uncertainty in", "uint32 q_GloFixTimeMs; /* Glonass fix time of day in milliseconds", "BIT[0] 0x00000001 <96> Accelerometer BIT[1] 0x00000002 <96> Gyro 0x0000FFFC -", "= 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1 LOG_GNSS_ME_DPO_STATUS = 0x1838 LOG_GNSS_CD_DB_REPORT =", "North. In units of degrees. */ uint32 q_FltEllipseSemimajorAxis; /* Semimajor", "Velocity aided with sensor data 0xFFFFFFF0 <96> Reserved */ uint8", "uint16_t parity_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval;", "milliseconds */ uint32 q_PosCount; /* Integer count of the number", "\"f\" elif typ == \"double\" or '_Dbl' in nam: st", "to Glonass time bias in meters */ uint32 q_FltGGTBSigmaMeters; /*", "to Glonass time bias uncertainty in meters */ uint32 q_FltSftOffsetSec;", "\"int8_t\"]: st += \"b\" elif typ in [\"uint32\", \"uint32_t\"]: st", "of degrees. 
*/ uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of final", "0x1516 glonass_measurement_report = \"\"\" uint8_t version; uint32_t f_count; uint8_t glonass_cycle_number;", "bit field: (see DM log 0x1476 documentation) */ uint32 _fake_align_week_number;", "0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE", "/* Vertical position reliability */ uint16 w_Reserved2; /* Reserved memory", "else: print(\"unknown type\", typ) assert False if '[' in nam:", "bias uncertainty in meters */ uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou", "number of GPS SVs used in the fix */ uint8", "uint8 u_NumGpsSvsUsed; /* The number of GPS SVs used in", "= 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT =", "DM log 0x1476 documentation) */ uint16 w_FixEvents; /* Fix events", "time bias uncertainty in meters */ uint32 q_FltBGTBMeters; /* BeiDou", "Receiver clock bias in meters */ uint32 q_FltClockBiasSigmaMeters; /* Gaussian", "uint32 q_FltBGTBMeters; /* BeiDou to Glonass time bias in meters", "\"uint16_t\"]: st += \"H\" elif typ in [\"int16\", \"int16_t\"]: st", "Semimajor axis of final horizontal position uncertainty error ellipse. 
In", "LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1 LOG_GNSS_ME_DPO_STATUS = 0x1838 LOG_GNSS_CD_DB_REPORT = 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT", "uint8_t misc_status; uint32_t multipath_estimate; float azimuth; float elevation; int32_t carrier_phase_cycles_integral;", "information */ /* 0: None 1: Weighted least-squares 2: Kalman", "radians derived from GNSS only solution */ uint32 q_FltGnssHeadingUncRad; /*", "millisecond counter */ uint8 u_PosSource; /* Source of position information", "0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1", "value for GPS to Glonass time bias uncertainty in meters", "position */ uint32 q_FltHeadingRad; /* User heading in radians */", "*/ uint32 q_GloFixTimeMs; /* Glonass fix time of day in", "count of the number of unique positions reported */ uint64", "typ in [\"uint8\", \"uint8_t\"]: st += \"B\" elif typ in", "height-above-ellipsoid altitude in meters */ uint8 u_HorizontalReliability; /* Horizontal position", "documentation) */ uint32 _fake_align_week_number; uint16 w_GpsWeekNumber; /* GPS week number", "typ, nam = l.split(\";\")[0].split() #print(typ, nam) if typ == \"float\"", "/* Position velocity 2 bit field: (see DM log 0x1476", "uint16 w_FixEvents; /* Fix events bit field: (see DM log", "*/ uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for receiver clock", "Glonass SVs detected by searcher, including ones not used in", "unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t", "fix */ uint8 u_TotalGpsSvs; /* Total number of GPS SVs", "uint32_t measurement_status; uint8_t misc_status; uint32_t multipath_estimate; float azimuth; float elevation;", "User heading in radians derived from GNSS only solution */", "SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 <96> Heading aided with sensor data BIT[1]", "four year cycle */ uint32 
q_GloFixTimeMs; /* Glonass fix time", "Gyro 0x0000FFFC - Reserved A bit set to 1 indicates", "GPS SVs detected by searcher, including ones not used in", "q_FltClockBiasMeters; /* Receiver clock bias in meters */ uint32 q_FltClockBiasSigmaMeters;", "= l.split(\";\")[0].split() #print(typ, nam) if typ == \"float\" or '_Flt'", "clock bias in meters */ uint32 q_FltGGTBMeters; /* GPS to", "meters */ uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for", "\"\"\" uint8_t version; uint32_t f_count; uint16_t week; uint32_t milliseconds; float", "BeiDou time bias uncertainty in meters */ uint32 q_FltBGTBMeters; /*", "good_observations; uint16_t parity_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t", "version; uint32_t f_count; uint16_t week; uint32_t milliseconds; float time_bias; float", "= 0x1838 LOG_GNSS_CD_DB_REPORT = 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT =", "to Glonass time bias uncertainty in meters */ uint32 q_FltFiltGBTBMeters;", "in seconds */ uint32 q_FltClockDriftMps; /* Clock drift (clock frequency", "position calculation */ \"\"\" def name_to_camelcase(nam): ret = [] i", "ellipse. In units of meters. 
*/ uint32 q_FltPosSigmaVertical; /* Gaussian", "*/ uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for BeiDou", "/* Semimajor axis of final horizontal position uncertainty error ellipse.", "Clock drift (clock frequency bias) in meters per second */", "BIT[2] 0x00000004 <96> Position aided with sensor data BIT[3] 0x00000008", "*/ uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for BeiDou to", "or '_Flt' in nam: st += \"f\" elif typ ==", "heading uncertainty in radians derived from GNSS only solution */", "heading in radians */ uint32 q_FltHeadingUncRad; /* User heading uncertainty", "GNSS only solution */ uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty", "observations; uint8_t good_observations; uint16_t parity_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t", "GPS fix time of week of in milliseconds */ uint8", "per second. */ uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for", "LOG_GNSS_ME_DPO_STATUS = 0x1838 LOG_GNSS_CD_DB_REPORT = 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT", "bias in meters */ uint32 q_FltGGTBMeters; /* GPS to Glonass", "*/ uint32 q_FltBGTBMeters; /* BeiDou to Glonass time bias in", "q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to BeiDou", "*/ uint32 q_FltGGTBMeters; /* GPS to Glonass time bias in", "seconds */ uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for SFT", "uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty in radians derived from", "from North. In units of degrees. 
*/ uint32 q_FltEllipseSemimajorAxis; /*", "as computed by WLS */ uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma", "database */ uint32 q_Reserved1; /* Reserved memory field */ uint16", "[\"uint16\", \"uint16_t\"]: st += \"H\" elif typ in [\"int16\", \"int16_t\"]:", "with sensor data BIT[2] 0x00000004 <96> Position aided with sensor", "\"\"\" uint8_t version; uint32_t f_count; uint8_t glonass_cycle_number; uint16_t glonass_number_of_days; uint32_t", "height-above-ellipsoid altitude in meters as computed by WLS */ uint32", "*/ uint32 _fake_align_week_number; uint16 w_GpsWeekNumber; /* GPS week number of", "DM log 0x1476 documentation) */ uint32 _fake_align_week_number; uint16 w_GpsWeekNumber; /*", "of meters. */ uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis of final", "== \"_\": ret.append(nam[i+1].upper()) i += 2 else: ret.append(nam[i]) i +=", "= \"\"\" uint8_t version; uint32_t f_count; uint16_t week; uint32_t milliseconds;", "= \"\"\" uint8_t version; uint32_t f_count; uint8_t glonass_cycle_number; uint16_t glonass_number_of_days;", "in meters per second */ uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid", "*/ uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS", "User heading uncertainty in radians derived from GNSS only solution", "High */ uint8 u_VerticalReliability; /* Vertical position reliability */ uint16", "\"\"\" uint8_t sv_id; int8_t frequency_index; uint8_t observation_state; // SVObservationStates uint8_t", "increasing angles moving clockwise from North. 
In units of degrees.", "uint32 q_FltClockDriftMps; /* Clock drift (clock frequency bias) in meters", "u_FailureCode; /* Failure code: (see DM log 0x1476 documentation) */", "Gaussian 1-sigma value for east, north, up components of user", "with sensor data BIT[3] 0x00000008 <96> Velocity aided with sensor", "struct import unpack_from, calcsize LOG_GNSS_POSITION_REPORT = 0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477", "in milliseconds */ uint32 q_PosCount; /* Integer count of the", "uncertainty ellipse values */ uint32 q_FltEllipseAngle; /* Angle of semimajor", "documentation) */ uint16 w_FixEvents; /* Fix events bit field: (see", "0: None 1: Weighted least-squares 2: Kalman filter 3: Externally", "fix. BIT[0] 0x00000001 <96> Accelerometer BIT[1] 0x00000002 <96> Gyro 0x0000FFFC", "longitude of position in radians */ uint32 q_FltFinalPosAlt; /* Final", "year cycle */ uint32 q_GloFixTimeMs; /* Glonass fix time of", "uncertainty in meters */ uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to", "defined by the SENSOR_AIDING_MASK were aided with sensor data*/ uint32", "u_TotalGpsSvs; /* Total number of GPS SVs detected by searcher,", "elif typ in [\"uint8\", \"uint8_t\"]: st += \"B\" elif typ", "*/ uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS", "Externally injected 4: Internal database */ uint32 q_Reserved1; /* Reserved", "q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for BeiDou to Glonass", "semimajor axis with respect to true North, with increasing angles", "\"I\" elif typ in [\"int32\", \"int32_t\"]: st += \"i\" elif", "0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1 LOG_GNSS_ME_DPO_STATUS = 0x1838 LOG_GNSS_CD_DB_REPORT = 0x147B", "st[-1]*(cnt-1) for i in range(cnt): nams.append(\"%s[%d]\" % (nam.split(\"[\")[0], i)) else:", "*/ uint32 q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou time bias", "position fix. 
BIT[0] 0x00000001 <96> Accelerometer BIT[1] 0x00000002 <96> Gyro", "axis of final horizontal position uncertainty error ellipse. In units", "return ''.join(ret) def parse_struct(ss): st = \"<\" nams = []", "LOG_GNSS_CONFIGURATION_STATE = 0x1516 glonass_measurement_report = \"\"\" uint8_t version; uint32_t f_count;", "matrix */ uint32 q_FltVdop; /* Vertical position dilution of precision", "/* Number of Glonass four year cycles */ uint16 w_GloNumDaysInFourYear;", "calculation */ uint8 u_NumGloSvsUsed; /* The number of Glonass SVs", "q_FltRawAlt; /* Raw height-above-ellipsoid altitude in meters as computed by", "range(cnt): nams.append(\"%s[%d]\" % (nam.split(\"[\")[0], i)) else: nams.append(nam) return st, nams", "\"\"\" position_report = \"\"\" uint8 u_Version; /* Version number of", "*/ uint16 w_Reserved2; /* Reserved memory field */ uint32 q_FltGnssHeadingRad;", "BeiDou to Glonass time bias uncertainty in meters */ uint32", "/* Reserved memory field */ uint32 q_FltGnssHeadingRad; /* User heading", "documentation) */ uint32 q_PosVelFlag2; /* Position velocity 2 bit field:", "observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations; uint16_t parity_error_count; uint8_t", "#print(typ, nam) if typ == \"float\" or '_Flt' in nam:", "position reliability 0: Not set 1: Very Low 2: Low", "<96> Position aided with sensor data BIT[3] 0x00000008 <96> Velocity", "position_report = \"\"\" uint8 u_Version; /* Version number of DM", "False): st, nams = parse_struct(ss) if camelcase: nams = [name_to_camelcase(x)", "meters */ uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for BeiDou", "calculation */ uint8 u_NumBdsSvsUsed; /* The number of BeiDou SVs", "from the unweighted uint32 q_FltHdop; /* Horizontal position dilution of", "to Glonass time bias uncertainty in meters */ uint32 q_FltGBTBMeters;", "elif typ in [\"uint16\", \"uint16_t\"]: st += \"H\" elif typ", "height-above-ellipsoid altitude in meters */ uint32 q_FltRawAlt; /* Raw 
height-above-ellipsoid", "in position calculation */ uint8 u_NumBdsSvsUsed; /* The number of", "final position height-above-ellipsoid altitude in meters */ uint8 u_HorizontalReliability; /*", "including ones not used in position calculation */ uint8 u_NumGloSvsUsed;", "log 0x1476 documentation) */ uint32 _fake_align_week_number; uint16 w_GpsWeekNumber; /* GPS", "uint8_t sv_id; uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations;", "position uncertainty error ellipse. In units of meters. */ uint32", "day in four year cycle */ uint32 q_GloFixTimeMs; /* Glonass", "for x in nams] sz = calcsize(st) return lambda x:", "as computed by WLS in seconds */ uint32 q_FltSftOffsetSigmaSec; /*", "uint8_t sv_count; \"\"\" glonass_measurement_report_sv = \"\"\" uint8_t sv_id; int8_t frequency_index;", "bias uncertainty in meters */ uint32 q_FltFiltGBTBMeters; /* Filtered GPS", "to BeiDou time bias uncertainty in meters */ uint32 q_FltFiltBGTBMeters;", "*/ uint32 q_FltHeadingUncRad; /* User heading uncertainty in radians */", "1-sigma value for filtered height-above-ellipsoid altitude in meters */ uint32", "/* Source of position information */ /* 0: None 1:", "Heading aided with sensor data BIT[1] 0x00000002 <96> Speed aided", "<96> Heading aided with sensor data BIT[1] 0x00000002 <96> Speed", "/* Final latitude and longitude of position in radians */", "bias in meters */ uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value", "Integer count of the number of unique positions reported */", "float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" glonass_measurement_report_sv = \"\"\" uint8_t sv_id;", "*/ uint32 q_FltClockDriftMps; /* Clock drift (clock frequency bias) in", "covariance matrix */ uint32 q_FltVdop; /* Vertical position dilution of", "component of the position report was assisted with additional sensors", "data BIT[2] 0x00000004 <96> Position aided with sensor data BIT[3]", "in meters */ uint32 q_FltFiltGGTBSigmaMeters; 
/* Filtered Gaussian 1-sigma value", "BeiDou to Glonass time bias in meters */ uint32 q_FltFiltBGTBSigmaMeters;", "uint32 q_PosVelFlag2; /* Position velocity 2 bit field: (see DM", "uint8_t version; uint32_t f_count; uint16_t week; uint32_t milliseconds; float time_bias;", "*/ uint32 q_FltEllipseAngle; /* Angle of semimajor axis with respect", "Filtered height-above-ellipsoid altitude in meters as computed by WLS */", "q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for clock drift in meters", "to Glonass time bias in meters */ uint32 q_FltFiltGGTBSigmaMeters; /*", "GPS to Glonass time bias in meters */ uint32 q_FltFiltGGTBSigmaMeters;", "/* User heading in radians */ uint32 q_FltHeadingUncRad; /* User", "uint8_t hemming_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval;", "st += \"I\" elif typ in [\"int32\", \"int32_t\"]: st +=", "LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1 LOG_GNSS_ME_DPO_STATUS", "Glonass SVs used in the fix */ uint8 u_TotalGloSvs; /*", "by WLS */ uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value for", "clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" glonass_measurement_report_sv = \"\"\" uint8_t", "/* Receiver clock bias in meters */ uint32 q_FltClockBiasSigmaMeters; /*", "the unweighted least-squares covariance matrix */ uint8 u_EllipseConfidence; /* Statistical", "for filtered height-above-ellipsoid altitude in meters */ uint32 q_FltRawAlt; /*", "GPS to BeiDou time bias in meters */ uint32 q_FltGBTBSigmaMeters;", "Filtered Gaussian 1-sigma value for GPS to BeiDou time bias", "second */ uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for clock", "'_Flt' in nam: st += \"f\" elif typ == \"double\"", "*/ uint32 q_FltRawAlt; /* Raw height-above-ellipsoid altitude in meters as", "in meters */ uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for", "uint32 
q_FltHeadingUncRad; /* User heading uncertainty in radians */ uint32", "North, with increasing angles moving clockwise from North. In units", "f_count; uint8_t glonass_cycle_number; uint16_t glonass_number_of_days; uint32_t milliseconds; float time_bias; float", "of DM log */ uint32 q_Fcount; /* Local millisecond counter", "covariance matrix */ uint8 u_EllipseConfidence; /* Statistical measure of the", "uint32 q_PosCount; /* Integer count of the number of unique", "*/ uint8 u_EllipseConfidence; /* Statistical measure of the confidence (percentage)", "= 0x1477 LOG_GNSS_CLOCK_REPORT = 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT =", "f_count; uint16_t week; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float", "/* Local millisecond counter */ uint8 u_PosSource; /* Source of", "*/ uint8 u_NumGpsSvsUsed; /* The number of GPS SVs used", "camelcase = False): st, nams = parse_struct(ss) if camelcase: nams", "by WLS */ uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value for", "memory field */ uint32 q_FltGnssHeadingRad; /* User heading in radians", "+= \"h\" elif typ == \"uint64\": st += \"Q\" else:", "q_GpsFixTimeMs; /* GPS fix time of week of in milliseconds", "bias in meters */ uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value", "four year cycles */ uint16 w_GloNumDaysInFourYear; /* Glonass calendar day", "uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to", "SVObservationStates uint8_t observations; uint8_t good_observations; uint8_t hemming_error_count; uint8_t filter_stages; uint16_t", "value for BeiDou to Glonass time bias uncertainty in meters", "from the unweighted least-squares covariance matrix */ uint8 u_EllipseConfidence; /*", "position */ uint32 q_GpsFixTimeMs; /* GPS fix time of week", "bias in meters */ uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma", "second */ uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in 
meters", "bias uncertainty in meters */ uint32 q_FltGBTBMeters; /* GPS to", "typ == \"uint64\": st += \"Q\" else: print(\"unknown type\", typ)", "altitude in meters */ uint8 u_HorizontalReliability; /* Horizontal position reliability", "bias uncertainty in meters */ uint32 q_FltFiltGGTBMeters; /* Filtered GPS", "to compute this position fix. BIT[0] 0x00000001 <96> Accelerometer BIT[1]", "q_FltHdop; /* Horizontal position dilution of precision as computed from", "Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty", "q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in meters as computed by", "time_bias; float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; \"\"\"", "in position calculation */ uint8 u_NumGloSvsUsed; /* The number of", "ss.strip().split(\"\\n\"): typ, nam = l.split(\";\")[0].split() #print(typ, nam) if typ ==", "only solution */ uint32 q_SensorDataUsageMask; /* Denotes which additional sensor", "Local millisecond counter */ uint8 u_PosSource; /* Source of position", "in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 <96> Heading aided with sensor data", "in position calculation */ \"\"\" def name_to_camelcase(nam): ret = []", "[\"uint32\", \"uint32_t\"]: st += \"I\" elif typ in [\"int32\", \"int32_t\"]:", "elif typ in [\"int16\", \"int16_t\"]: st += \"h\" elif typ", "/* Denotes which additional sensor data were used to compute", "Gaussian 1-sigma value for SFT offset in seconds */ uint32", "== \"float\" or '_Flt' in nam: st += \"f\" elif", "in milliseconds */ uint8 u_GloNumFourYear; /* Number of Glonass four", "value for receiver clock bias in meters */ uint32 q_FltGGTBMeters;", "drift (clock frequency bias) in meters per second */ uint32", "BIT[1] 0x00000002 <96> Gyro 0x0000FFFC - Reserved A bit set", "GPS week number of position */ uint32 q_GpsFixTimeMs; /* GPS", "*/ /* 0: None 1: Weighted least-squares 2: Kalman filter", "which additional sensor data were used to compute 
this position", "q_Reserved1; /* Reserved memory field */ uint16 w_PosVelFlag; /* Position", "q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for east, north, up components", "float unfiltered_time_uncertainty; float unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t misc_status;", "meters as computed by WLS */ uint32 q_FltRawAltSigma; /* Gaussian", "In units of meters. */ uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma", "in [\"uint8\", \"uint8_t\"]: st += \"B\" elif typ in [\"int8\",", "uint8_t good_observations; uint8_t hemming_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency;", "*/ \"\"\" def name_to_camelcase(nam): ret = [] i = 0", "unique positions reported */ uint64 t_DblFinalPosLatLon[2]; /* Final latitude and", "Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty", "sensor data were used to compute this position fix. BIT[0]", "/* Raw height-above-ellipsoid altitude in meters as computed by WLS", "meters per second */ uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value", "/* Statistical measure of the confidence (percentage) associated with the", "q_FltVelEnuMps[3]; /* User velocity in east, north, up coordinate frame.", "error ellipse. In units of meters. 
*/ uint32 q_FltPosSigmaVertical; /*", "GPS to BeiDou time bias in meters */ uint32 q_FltFiltGBTBSigmaMeters;", "typ) assert False if '[' in nam: cnt = int(nam.split(\"[\")[1].split(\"]\")[0])", "typ in [\"int8\", \"int8_t\"]: st += \"b\" elif typ in", "*/ uint32 q_PosVelFlag2; /* Position velocity 2 bit field: (see", "the confidence (percentage) associated with the uncertainty ellipse values */", "/* GPS to Glonass time bias in meters */ uint32", "int8_t frequency_index; uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations;", "height-above-ellipsoid altitude in meters */ uint32 align_Flt[14]; uint32 q_FltPdop; /*", "radians */ uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of position", "in meters as computed by WLS */ uint32 q_FltFilteredAltSigma; /*", "/* Filtered Gaussian 1-sigma value for BeiDou to Glonass time", "/* Total number of Glonass SVs detected by searcher, including", "meters */ uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for receiver", "gps_measurement_report_sv = \"\"\" uint8_t sv_id; uint8_t observation_state; // SVObservationStates uint8_t", "(see DM log 0x1476 documentation) */ uint16 w_FixEvents; /* Fix", "*/ uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in meters as", "q_FltPosSigmaVertical; /* Gaussian 1-sigma value for final position height-above-ellipsoid altitude", "_fake_align_week_number; uint16 w_GpsWeekNumber; /* GPS week number of position */", "if camelcase: nams = [name_to_camelcase(x) for x in nams] sz", "\"int16_t\"]: st += \"h\" elif typ == \"uint64\": st +=", "data*/ uint32 q_SensorAidMask; /* Denotes which component of the position", "uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for east, north, up", "typ == \"double\" or '_Dbl' in nam: st += \"d\"", "in [\"uint16\", \"uint16_t\"]: st += \"H\" elif typ in [\"int16\",", "\"_\": ret.append(nam[i+1].upper()) i += 2 else: ret.append(nam[i]) i += 1", "memory field */ uint16 
w_PosVelFlag; /* Position velocity bit field:", "of in milliseconds */ uint8 u_GloNumFourYear; /* Number of Glonass", "precision as computed from the unweighted uint32 q_FltHdop; /* Horizontal", "q_GloFixTimeMs; /* Glonass fix time of day in milliseconds */", "= 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT =", "sv_id; int8_t frequency_index; uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t", "the unweighted uint32 q_FltHdop; /* Horizontal position dilution of precision", "clock bias in meters */ uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma", "User heading in radians */ uint32 q_FltHeadingUncRad; /* User heading", "position calculation */ uint8 u_NumBdsSvsUsed; /* The number of BeiDou", "float time_bias; float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count;", "*/ uint32 q_FltHeadingRad; /* User heading in radians */ uint32", "== \"double\" or '_Dbl' in nam: st += \"d\" elif", "A bit set to 1 indicates that certain fields as", "of final horizontal position uncertainty error ellipse. 
In units of", "= \"\"\" uint8_t sv_id; int8_t frequency_index; uint8_t observation_state; // SVObservationStates", "float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; \"\"\" position_report", "in meters as computed by WLS */ uint32 q_FltRawAltSigma; /*", "time bias uncertainty in meters */ uint32 q_FltFiltGGTBMeters; /* Filtered", "uint8 u_HorizontalReliability; /* Horizontal position reliability 0: Not set 1:", "if '[' in nam: cnt = int(nam.split(\"[\")[1].split(\"]\")[0]) st += st[-1]*(cnt-1)", "\"\"\" gps_measurement_report = \"\"\" uint8_t version; uint32_t f_count; uint16_t week;", "Weighted least-squares 2: Kalman filter 3: Externally injected 4: Internal", "carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral; float", "\"int32_t\"]: st += \"i\" elif typ in [\"uint16\", \"uint16_t\"]: st", "of position */ uint32 q_FltHeadingRad; /* User heading in radians", "ones not used in position calculation */ \"\"\" def name_to_camelcase(nam):", "that certain fields as defined by the SENSOR_AIDING_MASK were aided", "Position velocity 2 bit field: (see DM log 0x1476 documentation)", "sensor data BIT[1] 0x00000002 <96> Speed aided with sensor data", "0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1 LOG_GNSS_ME_DPO_STATUS = 0x1838", "u_TotalBdsSvs; /* Total number of BeiDou SVs detected by searcher,", "Glonass fix time of day in milliseconds */ uint32 q_PosCount;", "to BeiDou time bias uncertainty in meters */ uint32 q_FltBGTBMeters;", "which component of the position report was assisted with additional", "computed from the unweighted least-squares covariance matrix */ uint8 u_EllipseConfidence;", "q_FltEllipseAngle; /* Angle of semimajor axis with respect to true", "calculation */ \"\"\" def name_to_camelcase(nam): ret = [] i =", "*/ uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value for raw height-above-ellipsoid", "/* 
Gaussian 1-sigma value for clock drift in meters per", "float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" gps_measurement_report_sv = \"\"\"", "1-sigma value for raw height-above-ellipsoid altitude in meters */ uint32", "week number of position */ uint32 q_GpsFixTimeMs; /* GPS fix", "in meters */ uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for", "0x1476 documentation) */ uint16 w_FixEvents; /* Fix events bit field:", "float azimuth; float elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed;", "st += \"h\" elif typ == \"uint64\": st += \"Q\"", "q_FltClockDriftMps; /* Clock drift (clock frequency bias) in meters per", "0x14E1 LOG_GNSS_ME_DPO_STATUS = 0x1838 LOG_GNSS_CD_DB_REPORT = 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E", "1-sigma value for GPS to Glonass time bias uncertainty in", "in radians */ uint32 q_FltHeadingUncRad; /* User heading uncertainty in", "Reserved memory field */ uint16 w_PosVelFlag; /* Position velocity bit", "of BeiDou SVs used in the fix */ uint8 u_TotalBdsSvs;", "uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral;", "q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for GPS to Glonass time", "defined in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 <96> Heading aided with sensor", "Vertical position reliability */ uint16 w_Reserved2; /* Reserved memory field", "the uncertainty ellipse values */ uint32 q_FltEllipseAngle; /* Angle of", "misc_status; uint32_t multipath_estimate; float azimuth; float elevation; int32_t carrier_phase_cycles_integral; uint16_t", "*/ uint8 u_TotalGpsSvs; /* Total number of GPS SVs detected", "value for filtered height-above-ellipsoid altitude in meters */ uint32 q_FltRawAlt;", "unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed; float 
unfiltered_speed_uncertainty; uint32_t", "time bias in meters */ uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma", "milliseconds */ uint8 u_GloNumFourYear; /* Number of Glonass four year", "in four year cycle */ uint32 q_GloFixTimeMs; /* Glonass fix", "uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for BeiDou to Glonass", "0x1476 documentation) */ uint32 q_PosVelFlag2; /* Position velocity 2 bit", "not used in position calculation */ \"\"\" def name_to_camelcase(nam): ret", "(see DM log 0x1476 documentation) */ uint32 q_PosVelFlag2; /* Position", "for i in range(cnt): nams.append(\"%s[%d]\" % (nam.split(\"[\")[0], i)) else: nams.append(nam)", "in meters */ uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for", "+= \"I\" elif typ in [\"int32\", \"int32_t\"]: st += \"i\"", "0x00000008 <96> Velocity aided with sensor data 0xFFFFFFF0 <96> Reserved", "Glonass time bias uncertainty in meters */ uint32 q_FltGBTBMeters; /*", "clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" gps_measurement_report_sv =", "/* SFT offset as computed by WLS in seconds */", "1-sigma value for east, north, up components of user velocity", "1 return ''.join(ret) def parse_struct(ss): st = \"<\" nams =", "dilution of precision as computed from the unweighted least-squares covariance", "with sensor data 0xFFFFFFF0 <96> Reserved */ uint8 u_NumGpsSvsUsed; /*", "number of Glonass SVs used in the fix */ uint8", "\"uint8_t\"]: st += \"B\" elif typ in [\"int8\", \"int8_t\"]: st", "<96> Gyro 0x0000FFFC - Reserved A bit set to 1", "sensor data*/ uint32 q_SensorAidMask; /* Denotes which component of the", "\"<\" nams = [] for l in ss.strip().split(\"\\n\"): typ, nam", "SENSOR_AIDING_MASK were aided with sensor data*/ uint32 q_SensorAidMask; /* Denotes", "uint32 q_SensorAidMask; /* Denotes which component of the position report", "of position information */ /* 0: None 1: Weighted least-squares", "uint32 
q_FltEllipseSemiminorAxis; /* Semiminor axis of final horizontal position uncertainty", "uint8_t observations; uint8_t good_observations; uint16_t parity_error_count; uint8_t filter_stages; uint16_t carrier_noise;", "uncertainty in meters */ uint32 q_FltFiltGGTBMeters; /* Filtered GPS to", "time of week of in milliseconds */ uint8 u_GloNumFourYear; /*", "0x00000002 <96> Speed aided with sensor data BIT[2] 0x00000004 <96>", "uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value for final position height-above-ellipsoid", "q_Fcount; /* Local millisecond counter */ uint8 u_PosSource; /* Source", "east, north, up components of user velocity */ uint32 q_FltClockBiasMeters;", "*/ uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for GPS to", "cnt = int(nam.split(\"[\")[1].split(\"]\")[0]) st += st[-1]*(cnt-1) for i in range(cnt):", "0x1838 LOG_GNSS_CD_DB_REPORT = 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488", "Failure code: (see DM log 0x1476 documentation) */ uint16 w_FixEvents;", "dict_unpacker(ss, camelcase = False): st, nams = parse_struct(ss) if camelcase:", "*/ uint8 u_NumGloSvsUsed; /* The number of Glonass SVs used", "filtered height-above-ellipsoid altitude in meters */ uint32 q_FltRawAlt; /* Raw", "st, nams = parse_struct(ss) if camelcase: nams = [name_to_camelcase(x) for", "Gaussian 1-sigma value for raw height-above-ellipsoid altitude in meters */", "Version number of DM log */ uint32 q_Fcount; /* Local", "time bias in meters */ uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian", "set 1: Very Low 2: Low 3: Medium 4: High", "Filtered GPS to BeiDou time bias in meters */ uint32", "of BeiDou SVs detected by searcher, including ones not used", "typ in [\"uint32\", \"uint32_t\"]: st += \"I\" elif typ in", "LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477 LOG_GNSS_CLOCK_REPORT = 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT", "w_GloNumDaysInFourYear; /* Glonass calendar day in 
four year cycle */", "nam: cnt = int(nam.split(\"[\")[1].split(\"]\")[0]) st += st[-1]*(cnt-1) for i in", "units of meters. */ uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis of", "uint32 _fake_align_week_number; uint16 w_GpsWeekNumber; /* GPS week number of position", "components of user velocity */ uint32 q_FltClockBiasMeters; /* Receiver clock", "with sensor data*/ uint32 q_SensorAidMask; /* Denotes which component of", "SFT offset in seconds */ uint32 q_FltClockDriftMps; /* Clock drift", "+= 1 return ''.join(ret) def parse_struct(ss): st = \"<\" nams", "*/ uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for SFT offset", "*/ uint8 u_NumBdsSvsUsed; /* The number of BeiDou SVs used", "bias in meters */ uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value", "of the position report was assisted with additional sensors defined", "bias in meters */ uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma", "used in the fix */ uint8 u_TotalGpsSvs; /* Total number", "float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" glonass_measurement_report_sv", "BIT[0] 0x00000001 <96> Heading aided with sensor data BIT[1] 0x00000002", "altitude in meters as computed by WLS */ uint32 q_FltFilteredAltSigma;", "in meters */ uint32 q_FltGBTBMeters; /* GPS to BeiDou time", "/* Gaussian 1-sigma value for GPS to Glonass time bias", "for SFT offset in seconds */ uint32 q_FltClockDriftMps; /* Clock", "*/ uint32 q_FltVelEnuMps[3]; /* User velocity in east, north, up", "meters */ uint32 align_Flt[14]; uint32 q_FltPdop; /* 3D position dilution", "uint8_t observations; uint8_t good_observations; uint8_t hemming_error_count; uint8_t filter_stages; uint16_t carrier_noise;", "Gaussian 1-sigma value for final position height-above-ellipsoid altitude in meters", "degrees. 
*/ uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of final horizontal", "in meters */ uint32 q_FltRawAlt; /* Raw height-above-ellipsoid altitude in", "observations; uint8_t good_observations; uint8_t hemming_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t", "\"double\" or '_Dbl' in nam: st += \"d\" elif typ", "/* Gaussian 1-sigma value for final position height-above-ellipsoid altitude in", "u_HorizontalReliability; /* Horizontal position reliability 0: Not set 1: Very", "*/ uint16 w_GloNumDaysInFourYear; /* Glonass calendar day in four year", "meters */ uint32 q_FltGBTBMeters; /* GPS to BeiDou time bias", "to Glonass time bias in meters */ uint32 q_FltFiltBGTBSigmaMeters; /*", "in [\"uint32\", \"uint32_t\"]: st += \"I\" elif typ in [\"int32\",", "the position report was assisted with additional sensors defined in", "cycle_slip_count; uint32_t pad; \"\"\" position_report = \"\"\" uint8 u_Version; /*", "nams = [name_to_camelcase(x) for x in nams] sz = calcsize(st)", "certain fields as defined by the SENSOR_AIDING_MASK were aided with", "LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488 LOG_GNSS_CONFIGURATION_STATE = 0x1516 glonass_measurement_report", "in radians derived from GNSS only solution */ uint32 q_FltGnssHeadingUncRad;", "meters */ uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass time", "float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status;", "ret = [] i = 0 while i < len(nam):", "/* The number of Glonass SVs used in the fix", "in the fix */ uint8 u_TotalGpsSvs; /* Total number of", "events bit field: (see DM log 0x1476 documentation) */ uint32", "/* User heading uncertainty in radians */ uint32 q_FltVelEnuMps[3]; /*", "computed by WLS */ uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value", "cycle_slip_count; uint32_t pad; \"\"\" gps_measurement_report = \"\"\" uint8_t version; 
uint32_t", "1-sigma value for BeiDou to Glonass time bias uncertainty in", "= int(nam.split(\"[\")[1].split(\"]\")[0]) st += st[-1]*(cnt-1) for i in range(cnt): nams.append(\"%s[%d]\"", "Position aided with sensor data BIT[3] 0x00000008 <96> Velocity aided", "= \"\"\" uint8_t sv_id; uint8_t observation_state; // SVObservationStates uint8_t observations;", "seconds */ uint32 q_FltClockDriftMps; /* Clock drift (clock frequency bias)", "sensor data BIT[3] 0x00000008 <96> Velocity aided with sensor data", "velocity */ uint32 q_FltClockBiasMeters; /* Receiver clock bias in meters", "with the uncertainty ellipse values */ uint32 q_FltEllipseAngle; /* Angle", "/* GPS week number of position */ uint32 q_GpsFixTimeMs; /*", "uncertainty in meters */ uint32 q_FltFiltGBTBMeters; /* Filtered GPS to", "[] for l in ss.strip().split(\"\\n\"): typ, nam = l.split(\";\")[0].split() #print(typ,", "offset in seconds */ uint32 q_FltClockDriftMps; /* Clock drift (clock", "GPS SVs used in the fix */ uint8 u_TotalGpsSvs; /*", "Not set 1: Very Low 2: Low 3: Medium 4:", "time bias in meters */ uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma", "in radians derived from GNSS only solution */ uint32 q_SensorDataUsageMask;", "by WLS in seconds */ uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma", "float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; \"\"\" position_report = \"\"\"", "values */ uint32 q_FltEllipseAngle; /* Angle of semimajor axis with", "u_GloNumFourYear; /* Number of Glonass four year cycles */ uint16", "In units of degrees. 
*/ uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis", "fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; \"\"\" gps_measurement_report =", "<96> Reserved */ uint8 u_NumGpsSvsUsed; /* The number of GPS", "uint32 q_FltClockBiasMeters; /* Receiver clock bias in meters */ uint32", "u_EllipseConfidence; /* Statistical measure of the confidence (percentage) associated with", "time bias uncertainty in meters */ uint32 q_FltSftOffsetSec; /* SFT", "reliability */ uint16 w_Reserved2; /* Reserved memory field */ uint32", "with respect to true North, with increasing angles moving clockwise", "value for final position height-above-ellipsoid altitude in meters */ uint8", "meters */ uint8 u_HorizontalReliability; /* Horizontal position reliability 0: Not", "int(nam.split(\"[\")[1].split(\"]\")[0]) st += st[-1]*(cnt-1) for i in range(cnt): nams.append(\"%s[%d]\" %", "used in position calculation */ uint8 u_NumGloSvsUsed; /* The number", "assisted with additional sensors defined in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 <96>", "of Glonass SVs used in the fix */ uint8 u_TotalGloSvs;", "the unweighted least-squares covariance matrix */ uint32 q_FltVdop; /* Vertical", "\"float\" or '_Flt' in nam: st += \"f\" elif typ", "time bias in meters */ uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian", "u_NumGpsSvsUsed; /* The number of GPS SVs used in the", "0x00000001 <96> Heading aided with sensor data BIT[1] 0x00000002 <96>", "/* Vertical position dilution of precision as computed from the", "bit field: (see DM log 0x1476 documentation) */ uint8 u_FailureCode;", "/* BeiDou to Glonass time bias in meters */ uint32", "meters */ uint32 q_FltBGTBMeters; /* BeiDou to Glonass time bias", "GPS to Glonass time bias uncertainty in meters */ uint32", "0x00000001 <96> Accelerometer BIT[1] 0x00000002 <96> Gyro 0x0000FFFC - Reserved", "Low 3: Medium 4: High */ uint8 u_VerticalReliability; /* Vertical", "week of in milliseconds */ uint8 u_GloNumFourYear; /* 
Number of", "indicates that certain fields as defined by the SENSOR_AIDING_MASK were", "/* Gaussian 1-sigma value for raw height-above-ellipsoid altitude in meters", "the fix */ uint8 u_TotalBdsSvs; /* Total number of BeiDou", "in east, north, up coordinate frame. In meters per second.", "uncertainty error ellipse. In units of meters. */ uint32 q_FltEllipseSemiminorAxis;", "WLS in seconds */ uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value", "uint8 u_TotalGloSvs; /* Total number of Glonass SVs detected by", "[\"int8\", \"int8_t\"]: st += \"b\" elif typ in [\"uint32\", \"uint32_t\"]:", "/* Filtered GPS to BeiDou time bias in meters */", "uint32 q_FltVelEnuMps[3]; /* User velocity in east, north, up coordinate", "BIT[3] 0x00000008 <96> Velocity aided with sensor data 0xFFFFFFF0 <96>", "in nams] sz = calcsize(st) return lambda x: dict(zip(nams, unpack_from(st,", "elif typ in [\"uint32\", \"uint32_t\"]: st += \"I\" elif typ", "to BeiDou time bias in meters */ uint32 q_FltFiltGBTBSigmaMeters; /*", "unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t misc_status; uint32_t multipath_estimate; float", "unpack_from, calcsize LOG_GNSS_POSITION_REPORT = 0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477 LOG_GNSS_CLOCK_REPORT =", "units of degrees. 
*/ uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of", "Final latitude and longitude of position in radians */ uint32", "for clock drift in meters per second */ uint32 q_FltFilteredAlt;", "position report was assisted with additional sensors defined in SENSOR_DATA_USAGE_MASK", "aided with sensor data BIT[3] 0x00000008 <96> Velocity aided with", "\"uint32_t\"]: st += \"I\" elif typ in [\"int32\", \"int32_t\"]: st", "uint32 q_FltHeadingRad; /* User heading in radians */ uint32 q_FltHeadingUncRad;", "/* Filtered BeiDou to Glonass time bias in meters */", "Total number of Glonass SVs detected by searcher, including ones", "uint8_t cycle_slip_count; uint32_t pad; \"\"\" gps_measurement_report = \"\"\" uint8_t version;", "if typ == \"float\" or '_Flt' in nam: st +=", "calendar day in four year cycle */ uint32 q_GloFixTimeMs; /*", "q_FltPdop; /* 3D position dilution of precision as computed from", "= False): st, nams = parse_struct(ss) if camelcase: nams =", "\"d\" elif typ in [\"uint8\", \"uint8_t\"]: st += \"B\" elif", "*/ uint32 q_FltFiltGGTBMeters; /* Filtered GPS to Glonass time bias", "'_Dbl' in nam: st += \"d\" elif typ in [\"uint8\",", "time bias uncertainty in meters */ uint32 q_FltFiltBGTBMeters; /* Filtered", "i = 0 while i < len(nam): if nam[i] ==", "ret.append(nam[i]) i += 1 return ''.join(ret) def parse_struct(ss): st =", "\"h\" elif typ == \"uint64\": st += \"Q\" else: print(\"unknown", "*/ uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value for filtered height-above-ellipsoid", "sz = calcsize(st) return lambda x: dict(zip(nams, unpack_from(st, x))), sz", "used in the fix */ uint8 u_TotalBdsSvs; /* Total number", "velocity bit field: (see DM log 0x1476 documentation) */ uint32", "uint8 u_VerticalReliability; /* Vertical position reliability */ uint16 w_Reserved2; /*", "value for raw height-above-ellipsoid altitude in meters */ uint32 align_Flt[14];", "*/ uint32 q_Fcount; /* Local millisecond counter */ uint8 u_PosSource;", "aided with sensor data*/ 
uint32 q_SensorAidMask; /* Denotes which component", "''.join(ret) def parse_struct(ss): st = \"<\" nams = [] for", "uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations; uint16_t parity_error_count;", "st += \"B\" elif typ in [\"int8\", \"int8_t\"]: st +=", "Reserved A bit set to 1 indicates that certain fields", "filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t postdetections; uint32_t", "uint8 u_NumBdsSvsUsed; /* The number of BeiDou SVs used in", "uint32 align_Flt[14]; uint32 q_FltPdop; /* 3D position dilution of precision", "velocity in east, north, up coordinate frame. In meters per", "name_to_camelcase(nam): ret = [] i = 0 while i <", "LOG_GNSS_CLOCK_REPORT = 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT", "least-squares covariance matrix */ uint32 q_FltVdop; /* Vertical position dilution", "i)) else: nams.append(nam) return st, nams def dict_unpacker(ss, camelcase =", "additional sensors defined in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 <96> Heading aided", "LOG_GNSS_CD_DB_REPORT = 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488 LOG_GNSS_CONFIGURATION_STATE", "st = \"<\" nams = [] for l in ss.strip().split(\"\\n\"):", "Internal database */ uint32 q_Reserved1; /* Reserved memory field */", "bit field: (see DM log 0x1476 documentation) */ uint32 q_PosVelFlag2;", "Low 2: Low 3: Medium 4: High */ uint8 u_VerticalReliability;", "uint32 q_FltFiltGGTBMeters; /* Filtered GPS to Glonass time bias in", "in the fix */ uint8 u_TotalGloSvs; /* Total number of", "clock_frequency_uncertainty; uint8_t sv_count; \"\"\" glonass_measurement_report_sv = \"\"\" uint8_t sv_id; int8_t", "as computed from the unweighted uint32 q_FltHdop; /* Horizontal position", "ones not used in position calculation */ uint8 u_NumBdsSvsUsed; /*", "altitude in meters */ uint32 q_FltRawAlt; 
/* Raw height-above-ellipsoid altitude", "/* Filtered height-above-ellipsoid altitude in meters as computed by WLS", "data 0xFFFFFFF0 <96> Reserved */ uint8 u_NumGpsSvsUsed; /* The number", "of GPS SVs used in the fix */ uint8 u_TotalGpsSvs;", "*/ uint8 u_GloNumFourYear; /* Number of Glonass four year cycles", "time of day in milliseconds */ uint32 q_PosCount; /* Integer", "uncertainty in meters */ uint32 q_FltSftOffsetSec; /* SFT offset as", "uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for GPS to Glonass", "/* Integer count of the number of unique positions reported", "in [\"int16\", \"int16_t\"]: st += \"h\" elif typ == \"uint64\":", "log */ uint32 q_Fcount; /* Local millisecond counter */ uint8", "angles moving clockwise from North. In units of degrees. */", "[name_to_camelcase(x) for x in nams] sz = calcsize(st) return lambda", "height-above-ellipsoid altitude of position */ uint32 q_FltHeadingRad; /* User heading", "0x00000004 <96> Position aided with sensor data BIT[3] 0x00000008 <96>", "*/ uint16 w_PosVelFlag; /* Position velocity bit field: (see DM", "q_FltGGTBMeters; /* GPS to Glonass time bias in meters */", "<96> Accelerometer BIT[1] 0x00000002 <96> Gyro 0x0000FFFC - Reserved A", "*/ uint64 t_DblFinalPosLatLon[2]; /* Final latitude and longitude of position", "\"B\" elif typ in [\"int8\", \"int8_t\"]: st += \"b\" elif", "uint8_t sv_count; \"\"\" gps_measurement_report_sv = \"\"\" uint8_t sv_id; uint8_t observation_state;", "Position velocity bit field: (see DM log 0x1476 documentation) */", "error ellipse. In units of meters. 
*/ uint32 q_FltEllipseSemiminorAxis; /*", "uint32 q_FltPdop; /* 3D position dilution of precision as computed", "*/ uint8 u_VerticalReliability; /* Vertical position reliability */ uint16 w_Reserved2;", "st += \"H\" elif typ in [\"int16\", \"int16_t\"]: st +=", "Filtered BeiDou to Glonass time bias in meters */ uint32", "of Glonass SVs detected by searcher, including ones not used", "uint32 q_FltGnssHeadingRad; /* User heading in radians derived from GNSS", "with sensor data BIT[1] 0x00000002 <96> Speed aided with sensor", "meters as computed by WLS */ uint32 q_FltFilteredAltSigma; /* Gaussian", "respect to true North, with increasing angles moving clockwise from", "1: Weighted least-squares 2: Kalman filter 3: Externally injected 4:", "meters per second */ uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude", "uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty;", "meters per second. */ uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value", "*/ uint16 w_FixEvents; /* Fix events bit field: (see DM", "/* Clock drift (clock frequency bias) in meters per second", "searcher, including ones not used in position calculation */ uint8", "fix */ uint8 u_TotalGloSvs; /* Total number of Glonass SVs", "coordinate frame. In meters per second. 
*/ uint32 q_FltVelSigmaMps[3]; /*", "*/ uint32 q_FltVdop; /* Vertical position dilution of precision as", "per second */ uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for", "meters */ uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value for", "import unpack_from, calcsize LOG_GNSS_POSITION_REPORT = 0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477 LOG_GNSS_CLOCK_REPORT", "<96> Velocity aided with sensor data 0xFFFFFFF0 <96> Reserved */", "matrix */ uint8 u_EllipseConfidence; /* Statistical measure of the confidence", "reliability 0: Not set 1: Very Low 2: Low 3:", "/* Gaussian 1-sigma value for SFT offset in seconds */", "4: Internal database */ uint32 q_Reserved1; /* Reserved memory field", "/* The number of BeiDou SVs used in the fix", "0xFFFFFFF0 <96> Reserved */ uint8 u_NumGpsSvsUsed; /* The number of", "+= \"d\" elif typ in [\"uint8\", \"uint8_t\"]: st += \"B\"", "/* Horizontal position reliability 0: Not set 1: Very Low", "north, up components of user velocity */ uint32 q_FltClockBiasMeters; /*", "uint8_t cycle_slip_count; uint32_t pad; \"\"\" position_report = \"\"\" uint8 u_Version;", "to true North, with increasing angles moving clockwise from North.", "1-sigma value for receiver clock bias in meters */ uint32", "of semimajor axis with respect to true North, with increasing", "cycles */ uint16 w_GloNumDaysInFourYear; /* Glonass calendar day in four", "Glonass time bias uncertainty in meters */ uint32 q_FltSftOffsetSec; /*", "parse_struct(ss): st = \"<\" nams = [] for l in", "[\"int32\", \"int32_t\"]: st += \"i\" elif typ in [\"uint16\", \"uint16_t\"]:", "q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass time bias in meters", "= [name_to_camelcase(x) for x in nams] sz = calcsize(st) return", "of precision as computed from the unweighted least-squares covariance matrix", "align_Flt[14]; uint32 q_FltPdop; /* 3D position dilution of precision as", "Denotes which additional sensor data were used to compute this", "fine_speed; float 
fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; \"\"\" position_report =", "= 0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477 LOG_GNSS_CLOCK_REPORT = 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT =", "\"b\" elif typ in [\"uint32\", \"uint32_t\"]: st += \"I\" elif", "False if '[' in nam: cnt = int(nam.split(\"[\")[1].split(\"]\")[0]) st +=", "== \"uint64\": st += \"Q\" else: print(\"unknown type\", typ) assert", "of week of in milliseconds */ uint8 u_GloNumFourYear; /* Number", "observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations; uint8_t hemming_error_count; uint8_t", "glonass_measurement_report_sv = \"\"\" uint8_t sv_id; int8_t frequency_index; uint8_t observation_state; //", "per second */ uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in", "l in ss.strip().split(\"\\n\"): typ, nam = l.split(\";\")[0].split() #print(typ, nam) if", "def parse_struct(ss): st = \"<\" nams = [] for l", "including ones not used in position calculation */ uint8 u_NumBdsSvsUsed;", "documentation) */ uint8 u_FailureCode; /* Failure code: (see DM log", "meters */ uint32 q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou time", "Denotes which component of the position report was assisted with", "used in the fix */ uint8 u_TotalGloSvs; /* Total number", "uint8 u_Version; /* Version number of DM log */ uint32", "from the unweighted least-squares covariance matrix */ uint32 q_FltVdop; /*", "with additional sensors defined in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 <96> Heading", "ellipse. In units of meters. */ uint32 q_FltEllipseSemiminorAxis; /* Semiminor", "3: Externally injected 4: Internal database */ uint32 q_Reserved1; /*", "len(nam): if nam[i] == \"_\": ret.append(nam[i+1].upper()) i += 2 else:", "final horizontal position uncertainty error ellipse. 
In units of meters.", "1-sigma value for clock drift in meters per second */", "\"\"\" uint8 u_Version; /* Version number of DM log */", "Statistical measure of the confidence (percentage) associated with the uncertainty", "/* Glonass calendar day in four year cycle */ uint32", "/* Reserved memory field */ uint16 w_PosVelFlag; /* Position velocity", "version; uint32_t f_count; uint8_t glonass_cycle_number; uint16_t glonass_number_of_days; uint32_t milliseconds; float", "bit set to 1 indicates that certain fields as defined", "GNSS only solution */ uint32 q_SensorDataUsageMask; /* Denotes which additional", "up coordinate frame. In meters per second. */ uint32 q_FltVelSigmaMps[3];", "None 1: Weighted least-squares 2: Kalman filter 3: Externally injected", "as defined by the SENSOR_AIDING_MASK were aided with sensor data*/", "Glonass time bias in meters */ uint32 q_FltGGTBSigmaMeters; /* Gaussian", "uint16 w_GloNumDaysInFourYear; /* Glonass calendar day in four year cycle", "for final position height-above-ellipsoid altitude in meters */ uint8 u_HorizontalReliability;", "= 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1 LOG_GNSS_ME_DPO_STATUS =", "sv_id; uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations; uint16_t", "uint8 u_NumGloSvsUsed; /* The number of Glonass SVs used in", "uint32 q_SensorDataUsageMask; /* Denotes which additional sensor data were used", "In meters per second. */ uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma", "DM log */ uint32 q_Fcount; /* Local millisecond counter */", "*/ uint32 q_FltClockBiasMeters; /* Receiver clock bias in meters */", "uint32 q_FltGGTBMeters; /* GPS to Glonass time bias in meters", "u_TotalGloSvs; /* Total number of Glonass SVs detected by searcher,", "In units of meters. 
*/ uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis", "field */ uint16 w_PosVelFlag; /* Position velocity bit field: (see", "carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; \"\"\"", "Vertical position dilution of precision as computed from the unweighted", "Source of position information */ /* 0: None 1: Weighted", "uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for SFT offset in", "unfiltered_time_uncertainty; float unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t misc_status; uint32_t", "of the number of unique positions reported */ uint64 t_DblFinalPosLatLon[2];", "DM log 0x1476 documentation) */ uint8 u_FailureCode; /* Failure code:", "altitude of position */ uint32 q_FltHeadingRad; /* User heading in", "uint32_t multipath_estimate; float azimuth; float elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction;", "of Glonass four year cycles */ uint16 w_GloNumDaysInFourYear; /* Glonass", "*/ uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of position */", "w_Reserved2; /* Reserved memory field */ uint32 q_FltGnssHeadingRad; /* User", "q_FltHeadingUncRad; /* User heading uncertainty in radians */ uint32 q_FltVelEnuMps[3];", "BeiDou time bias uncertainty in meters */ uint32 q_FltFiltBGTBMeters; /*", "week; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias; float", "unweighted least-squares covariance matrix */ uint8 u_EllipseConfidence; /* Statistical measure", "Total number of BeiDou SVs detected by searcher, including ones", "the fix */ uint8 u_TotalGpsSvs; /* Total number of GPS", "receiver clock bias in meters */ uint32 q_FltGGTBMeters; /* GPS", "uint8 u_GloNumFourYear; /* Number of Glonass four year cycles */", "Glonass time bias in meters */ uint32 q_FltBGTBSigmaMeters; /* Gaussian", "for east, north, up components of user velocity */ 
uint32", "sv_count; \"\"\" glonass_measurement_report_sv = \"\"\" uint8_t sv_id; int8_t frequency_index; uint8_t", "/* Version number of DM log */ uint32 q_Fcount; /*", "milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t", "// SVObservationStates uint8_t observations; uint8_t good_observations; uint8_t hemming_error_count; uint8_t filter_stages;", "0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488 LOG_GNSS_CONFIGURATION_STATE = 0x1516 glonass_measurement_report = \"\"\"", "position information */ /* 0: None 1: Weighted least-squares 2:", "bias uncertainty in meters */ uint32 q_FltBGTBMeters; /* BeiDou to", "uint32 q_FltRawAlt; /* Raw height-above-ellipsoid altitude in meters as computed", "0 while i < len(nam): if nam[i] == \"_\": ret.append(nam[i+1].upper())", "Medium 4: High */ uint8 u_VerticalReliability; /* Vertical position reliability", "uint8_t predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty;", "/* GPS to BeiDou time bias in meters */ uint32", "log 0x1476 documentation) */ uint32 q_PosVelFlag2; /* Position velocity 2", "*/ uint8 u_TotalBdsSvs; /* Total number of BeiDou SVs detected", "st += \"b\" elif typ in [\"uint32\", \"uint32_t\"]: st +=", "return st, nams def dict_unpacker(ss, camelcase = False): st, nams", "0x1488 LOG_GNSS_CONFIGURATION_STATE = 0x1516 glonass_measurement_report = \"\"\" uint8_t version; uint32_t", "typ in [\"int16\", \"int16_t\"]: st += \"h\" elif typ ==", "position reliability */ uint16 w_Reserved2; /* Reserved memory field */", "q_FltGnssHeadingUncRad; /* User heading uncertainty in radians derived from GNSS", "position calculation */ uint8 u_NumGloSvsUsed; /* The number of Glonass", "= 0 while i < len(nam): if nam[i] == \"_\":", "q_FltVdop; /* Vertical position dilution of precision as computed from", "/* Failure code: (see DM log 0x1476 
documentation) */ uint16", "Accelerometer BIT[1] 0x00000002 <96> Gyro 0x0000FFFC - Reserved A bit", "*/ uint32 align_Flt[14]; uint32 q_FltPdop; /* 3D position dilution of", "Total number of GPS SVs detected by searcher, including ones", "Angle of semimajor axis with respect to true North, with", "detected by searcher, including ones not used in position calculation", "q_FltHeadingRad; /* User heading in radians */ uint32 q_FltHeadingUncRad; /*", "fix */ uint8 u_TotalBdsSvs; /* Total number of BeiDou SVs", "time bias uncertainty in meters */ uint32 q_FltGBTBMeters; /* GPS", "SVObservationStates uint8_t observations; uint8_t good_observations; uint16_t parity_error_count; uint8_t filter_stages; uint16_t", "in meters */ uint32 q_FltFiltGGTBMeters; /* Filtered GPS to Glonass", "in [\"int8\", \"int8_t\"]: st += \"b\" elif typ in [\"uint32\",", "x in nams] sz = calcsize(st) return lambda x: dict(zip(nams,", "q_PosCount; /* Integer count of the number of unique positions", "good_observations; uint8_t hemming_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t", "q_FltSftOffsetSec; /* SFT offset as computed by WLS in seconds", "in meters */ uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for", "*/ uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass time bias", "LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT", "\"\"\" uint8_t sv_id; uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t", "/* Gaussian 1-sigma value for east, north, up components of", "multipath_estimate; float azimuth; float elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float", "+= \"i\" elif typ in [\"uint16\", \"uint16_t\"]: st += \"H\"", "least-squares covariance matrix */ uint8 u_EllipseConfidence; /* Statistical measure of", "glonass_measurement_report = \"\"\" uint8_t version; 
uint32_t f_count; uint8_t glonass_cycle_number; uint16_t", "/* Horizontal position dilution of precision as computed from the", "elif typ in [\"int8\", \"int8_t\"]: st += \"b\" elif typ", "LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT", "in meters */ uint32 q_FltSftOffsetSec; /* SFT offset as computed", "i < len(nam): if nam[i] == \"_\": ret.append(nam[i+1].upper()) i +=", "parse_struct(ss) if camelcase: nams = [name_to_camelcase(x) for x in nams]", "def dict_unpacker(ss, camelcase = False): st, nams = parse_struct(ss) if", "4: High */ uint8 u_VerticalReliability; /* Vertical position reliability */", "0x1476 documentation) */ uint8 u_FailureCode; /* Failure code: (see DM", "uint8 u_FailureCode; /* Failure code: (see DM log 0x1476 documentation)", "as computed from the unweighted least-squares covariance matrix */ uint32", "bias in meters */ uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma", "w_FixEvents; /* Fix events bit field: (see DM log 0x1476", "for GPS to BeiDou time bias uncertainty in meters */", "Filtered Gaussian 1-sigma value for BeiDou to Glonass time bias", "uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for clock drift in", "bias uncertainty in meters */ uint32 q_FltSftOffsetSec; /* SFT offset", "User velocity in east, north, up coordinate frame. 
In meters", "uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed; float unfiltered_speed_uncertainty;", "/* Gaussian 1-sigma value for receiver clock bias in meters", "q_FltEllipseSemiminorAxis; /* Semiminor axis of final horizontal position uncertainty error", "Glonass time bias uncertainty in meters */ uint32 q_FltFiltGBTBMeters; /*", "1: Very Low 2: Low 3: Medium 4: High */", "// SVObservationStates uint8_t observations; uint8_t good_observations; uint16_t parity_error_count; uint8_t filter_stages;", "were aided with sensor data*/ uint32 q_SensorAidMask; /* Denotes which", "aided with sensor data BIT[1] 0x00000002 <96> Speed aided with", "heading in radians derived from GNSS only solution */ uint32", "not used in position calculation */ uint8 u_NumBdsSvsUsed; /* The", "+= \"b\" elif typ in [\"uint32\", \"uint32_t\"]: st += \"I\"", "from GNSS only solution */ uint32 q_SensorDataUsageMask; /* Denotes which", "clock_frequency_uncertainty; uint8_t sv_count; \"\"\" gps_measurement_report_sv = \"\"\" uint8_t sv_id; uint8_t", "elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty; uint8_t", "set to 1 indicates that certain fields as defined by", "field: (see DM log 0x1476 documentation) */ uint32 q_PosVelFlag2; /*", "radians derived from GNSS only solution */ uint32 q_SensorDataUsageMask; /*", "*/ uint32 q_FltGnssHeadingRad; /* User heading in radians derived from", "latitude and longitude of position in radians */ uint32 q_FltFinalPosAlt;", "uint16_t carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad;", "/* Filtered Gaussian 1-sigma value for GPS to Glonass time", "the fix */ uint8 u_TotalGloSvs; /* Total number of Glonass", "radians */ uint32 q_FltVelEnuMps[3]; /* User velocity in east, north,", "in meters */ uint8 
u_HorizontalReliability; /* Horizontal position reliability 0:", "derived from GNSS only solution */ uint32 q_SensorDataUsageMask; /* Denotes", "solution */ uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty in radians", "latency; uint8_t predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float", "(see DM log 0x1476 documentation) */ uint8 u_FailureCode; /* Failure", "of position in radians */ uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid", "nams] sz = calcsize(st) return lambda x: dict(zip(nams, unpack_from(st, x))),", "time bias in meters */ uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian", "*/ uint32 q_SensorDataUsageMask; /* Denotes which additional sensor data were", "dilution of precision as computed from the unweighted uint32 q_FltHdop;", "q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for SFT offset in seconds", "the number of unique positions reported */ uint64 t_DblFinalPosLatLon[2]; /*", "altitude in meters as computed by WLS */ uint32 q_FltRawAltSigma;", "uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value for filtered height-above-ellipsoid altitude", "computed from the unweighted least-squares covariance matrix */ uint32 q_FltVdop;", "[] i = 0 while i < len(nam): if nam[i]", "float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" gps_measurement_report_sv", "in [\"int32\", \"int32_t\"]: st += \"i\" elif typ in [\"uint16\",", "- Reserved A bit set to 1 indicates that certain", "*/ uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of final horizontal position", "+= \"B\" elif typ in [\"int8\", \"int8_t\"]: st += \"b\"", "/* Gaussian 1-sigma value for GPS to BeiDou time bias", "number of BeiDou SVs detected by searcher, including ones not", "uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for GPS to BeiDou", "float unfiltered_speed_uncertainty; uint32_t measurement_status; 
uint8_t misc_status; uint32_t multipath_estimate; float azimuth;", "for raw height-above-ellipsoid altitude in meters */ uint32 align_Flt[14]; uint32", "sv_count; \"\"\" gps_measurement_report_sv = \"\"\" uint8_t sv_id; uint8_t observation_state; //", "= 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488 LOG_GNSS_CONFIGURATION_STATE = 0x1516 glonass_measurement_report =", "/* 3D position dilution of precision as computed from the", "uint8 u_TotalBdsSvs; /* Total number of BeiDou SVs detected by", "year cycles */ uint16 w_GloNumDaysInFourYear; /* Glonass calendar day in", "postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed; float", "*/ uint32 q_FltSftOffsetSec; /* SFT offset as computed by WLS", "in nam: cnt = int(nam.split(\"[\")[1].split(\"]\")[0]) st += st[-1]*(cnt-1) for i", "injected 4: Internal database */ uint32 q_Reserved1; /* Reserved memory", "q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou time bias in meters", "of unique positions reported */ uint64 t_DblFinalPosLatLon[2]; /* Final latitude", "*/ uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for GPS to", "as computed by WLS */ uint32 q_FltRawAltSigma; /* Gaussian 1-sigma", "uint32 q_FltHdop; /* Horizontal position dilution of precision as computed", "used in position calculation */ \"\"\" def name_to_camelcase(nam): ret =", "/* Filtered GPS to Glonass time bias in meters */", "[\"int16\", \"int16_t\"]: st += \"h\" elif typ == \"uint64\": st", "st += \"f\" elif typ == \"double\" or '_Dbl' in", "2 else: ret.append(nam[i]) i += 1 return ''.join(ret) def parse_struct(ss):", "*/ uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for east, north,", "meters */ uint32 q_FltGGTBMeters; /* GPS to Glonass time bias", "position in radians */ uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude", "q_SensorAidMask; /* Denotes which component of the position report was", "/* User heading 
uncertainty in radians derived from GNSS only", "q_FltFiltGGTBMeters; /* Filtered GPS to Glonass time bias in meters", "SVs used in the fix */ uint8 u_TotalGpsSvs; /* Total", "[\"uint8\", \"uint8_t\"]: st += \"B\" elif typ in [\"int8\", \"int8_t\"]:", "meters. */ uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis of final horizontal", "this position fix. BIT[0] 0x00000001 <96> Accelerometer BIT[1] 0x00000002 <96>", "if nam[i] == \"_\": ret.append(nam[i+1].upper()) i += 2 else: ret.append(nam[i])", "for GPS to Glonass time bias uncertainty in meters */", "Horizontal position reliability 0: Not set 1: Very Low 2:", "uint16_t postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed;", "= 0x1488 LOG_GNSS_CONFIGURATION_STATE = 0x1516 glonass_measurement_report = \"\"\" uint8_t version;", "\"H\" elif typ in [\"int16\", \"int16_t\"]: st += \"h\" elif", "unweighted uint32 q_FltHdop; /* Horizontal position dilution of precision as", "SFT offset as computed by WLS in seconds */ uint32", "of user velocity */ uint32 q_FltClockBiasMeters; /* Receiver clock bias", "in meters per second */ uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma", "u_PosSource; /* Source of position information */ /* 0: None", "pad; \"\"\" position_report = \"\"\" uint8 u_Version; /* Version number", "precision as computed from the unweighted least-squares covariance matrix */", "q_FltFilteredAltSigma; /* Gaussian 1-sigma value for filtered height-above-ellipsoid altitude in", "predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float", "uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for receiver clock bias", "clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" gps_measurement_report_sv = \"\"\" uint8_t", "to Glonass time bias uncertainty in meters */ 
uint32 q_FltFiltGGTBMeters;", "0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488 LOG_GNSS_CONFIGURATION_STATE = 0x1516", "elif typ == \"double\" or '_Dbl' in nam: st +=", "1 indicates that certain fields as defined by the SENSOR_AIDING_MASK", "WLS */ uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value for raw", "value for east, north, up components of user velocity */", "were used to compute this position fix. BIT[0] 0x00000001 <96>", "float unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t misc_status; uint32_t multipath_estimate;", "0: Not set 1: Very Low 2: Low 3: Medium", "assert False if '[' in nam: cnt = int(nam.split(\"[\")[1].split(\"]\")[0]) st", "1-sigma value for SFT offset in seconds */ uint32 q_FltClockDriftMps;", "/* Final height-above-ellipsoid altitude of position */ uint32 q_FltHeadingRad; /*", "in meters */ uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value", "uint32_t pad; \"\"\" gps_measurement_report = \"\"\" uint8_t version; uint32_t f_count;", "glonass_cycle_number; uint16_t glonass_number_of_days; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float", "i += 1 return ''.join(ret) def parse_struct(ss): st = \"<\"", "Fix events bit field: (see DM log 0x1476 documentation) */", "was assisted with additional sensors defined in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001", "report was assisted with additional sensors defined in SENSOR_DATA_USAGE_MASK BIT[0]", "q_FltGnssHeadingRad; /* User heading in radians derived from GNSS only", "/* User velocity in east, north, up coordinate frame. 
In", "*/ uint32 q_Reserved1; /* Reserved memory field */ uint16 w_PosVelFlag;", "/* Glonass fix time of day in milliseconds */ uint32", "= 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488 LOG_GNSS_CONFIGURATION_STATE =", "def name_to_camelcase(nam): ret = [] i = 0 while i", "uint32_t f_count; uint16_t week; uint32_t milliseconds; float time_bias; float clock_time_uncertainty;", "t_DblFinalPosLatLon[2]; /* Final latitude and longitude of position in radians", "units of meters. */ uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value", "fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; \"\"\" position_report = \"\"\" uint8", "3D position dilution of precision as computed from the unweighted", "fields as defined by the SENSOR_AIDING_MASK were aided with sensor", "q_PosVelFlag2; /* Position velocity 2 bit field: (see DM log", "/* Total number of GPS SVs detected by searcher, including", "code: (see DM log 0x1476 documentation) */ uint16 w_FixEvents; /*", "elif typ in [\"int32\", \"int32_t\"]: st += \"i\" elif typ", "BeiDou SVs detected by searcher, including ones not used in", "The number of BeiDou SVs used in the fix */", "uint32 q_FltSftOffsetSec; /* SFT offset as computed by WLS in", "2: Low 3: Medium 4: High */ uint8 u_VerticalReliability; /*", "typ in [\"uint16\", \"uint16_t\"]: st += \"H\" elif typ in", "q_FltRawAltSigma; /* Gaussian 1-sigma value for raw height-above-ellipsoid altitude in", "of position */ uint32 q_GpsFixTimeMs; /* GPS fix time of", "calcsize LOG_GNSS_POSITION_REPORT = 0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477 LOG_GNSS_CLOCK_REPORT = 0x1478", "while i < len(nam): if nam[i] == \"_\": ret.append(nam[i+1].upper()) i", "int16_t latency; uint8_t predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction;", "of day in milliseconds */ uint32 q_PosCount; /* Integer count", "fine_speed_uncertainty; uint8_t cycle_slip_count; 
uint32_t pad; \"\"\" gps_measurement_report = \"\"\" uint8_t", "*/ uint32 q_GpsFixTimeMs; /* GPS fix time of week of", "Glonass time bias in meters */ uint32 q_FltFiltGGTBSigmaMeters; /* Filtered", "Glonass time bias uncertainty in meters */ uint32 q_FltFiltGGTBMeters; /*", "LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488 LOG_GNSS_CONFIGURATION_STATE = 0x1516 glonass_measurement_report = \"\"\" uint8_t", "User heading uncertainty in radians */ uint32 q_FltVelEnuMps[3]; /* User", "/* 0: None 1: Weighted least-squares 2: Kalman filter 3:", "clock drift in meters per second */ uint32 q_FltFilteredAlt; /*", "meters */ uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for GPS", "from struct import unpack_from, calcsize LOG_GNSS_POSITION_REPORT = 0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT =", "from GNSS only solution */ uint32 q_FltGnssHeadingUncRad; /* User heading", "q_FltBGTBMeters; /* BeiDou to Glonass time bias in meters */", "number of GPS SVs detected by searcher, including ones not", "searcher, including ones not used in position calculation */ \"\"\"", "uint16 w_GpsWeekNumber; /* GPS week number of position */ uint32", "else: ret.append(nam[i]) i += 1 return ''.join(ret) def parse_struct(ss): st", "data BIT[3] 0x00000008 <96> Velocity aided with sensor data 0xFFFFFFF0", "Semiminor axis of final horizontal position uncertainty error ellipse. 
In", "uncertainty in radians derived from GNSS only solution */ uint32", "uint32_t pad; \"\"\" position_report = \"\"\" uint8 u_Version; /* Version", "*/ uint32 q_PosCount; /* Integer count of the number of", "in nam: st += \"f\" elif typ == \"double\" or", "+= \"H\" elif typ in [\"int16\", \"int16_t\"]: st += \"h\"", "nams def dict_unpacker(ss, camelcase = False): st, nams = parse_struct(ss)", "aided with sensor data 0xFFFFFFF0 <96> Reserved */ uint8 u_NumGpsSvsUsed;", "uint32_t f_count; uint8_t glonass_cycle_number; uint16_t glonass_number_of_days; uint32_t milliseconds; float time_bias;", "number of DM log */ uint32 q_Fcount; /* Local millisecond", "position dilution of precision as computed from the unweighted uint32", "uint32 q_FltVdop; /* Vertical position dilution of precision as computed", "horizontal position uncertainty error ellipse. In units of meters. */", "+= st[-1]*(cnt-1) for i in range(cnt): nams.append(\"%s[%d]\" % (nam.split(\"[\")[0], i))", "user velocity */ uint32 q_FltClockBiasMeters; /* Receiver clock bias in", "uint8 u_EllipseConfidence; /* Statistical measure of the confidence (percentage) associated", "including ones not used in position calculation */ \"\"\" def", "number of unique positions reported */ uint64 t_DblFinalPosLatLon[2]; /* Final", "with increasing angles moving clockwise from North. 
In units of", "= parse_struct(ss) if camelcase: nams = [name_to_camelcase(x) for x in", "for receiver clock bias in meters */ uint32 q_FltGGTBMeters; /*", "q_FltGBTBMeters; /* GPS to BeiDou time bias in meters */", "SVs detected by searcher, including ones not used in position", "associated with the uncertainty ellipse values */ uint32 q_FltEllipseAngle; /*", "uint8_t sv_id; int8_t frequency_index; uint8_t observation_state; // SVObservationStates uint8_t observations;", "nams = parse_struct(ss) if camelcase: nams = [name_to_camelcase(x) for x", "clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" glonass_measurement_report_sv =", "l.split(\";\")[0].split() #print(typ, nam) if typ == \"float\" or '_Flt' in", "uint64 t_DblFinalPosLatLon[2]; /* Final latitude and longitude of position in", "in nam: st += \"d\" elif typ in [\"uint8\", \"uint8_t\"]:", "second. */ uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for east,", "*/ uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for clock drift", "pad; \"\"\" gps_measurement_report = \"\"\" uint8_t version; uint32_t f_count; uint16_t", "st += \"Q\" else: print(\"unknown type\", typ) assert False if", "SVs used in the fix */ uint8 u_TotalBdsSvs; /* Total", "Speed aided with sensor data BIT[2] 0x00000004 <96> Position aided", "compute this position fix. 
BIT[0] 0x00000001 <96> Accelerometer BIT[1] 0x00000002", "Glonass four year cycles */ uint16 w_GloNumDaysInFourYear; /* Glonass calendar", "*/ uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis of final horizontal position", "in radians */ uint32 q_FltVelEnuMps[3]; /* User velocity in east,", "uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value for raw height-above-ellipsoid altitude", "in range(cnt): nams.append(\"%s[%d]\" % (nam.split(\"[\")[0], i)) else: nams.append(nam) return st,", "q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to Glonass", "/* Position velocity bit field: (see DM log 0x1476 documentation)", "offset as computed by WLS in seconds */ uint32 q_FltSftOffsetSigmaSec;", "nam: st += \"d\" elif typ in [\"uint8\", \"uint8_t\"]: st", "measure of the confidence (percentage) associated with the uncertainty ellipse", "+= \"f\" elif typ == \"double\" or '_Dbl' in nam:", "uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of position */ uint32", "number of Glonass SVs detected by searcher, including ones not", "day in milliseconds */ uint32 q_PosCount; /* Integer count of", "aided with sensor data BIT[2] 0x00000004 <96> Position aided with", "Gaussian 1-sigma value for GPS to Glonass time bias uncertainty", "only solution */ uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty in", "0x00000002 <96> Gyro 0x0000FFFC - Reserved A bit set to", "in radians */ uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of", "computed from the unweighted uint32 q_FltHdop; /* Horizontal position dilution", "confidence (percentage) associated with the uncertainty ellipse values */ uint32", "q_FltEllipseSemimajorAxis; /* Semimajor axis of final horizontal position uncertainty error", "/* Total number of BeiDou SVs detected by searcher, including", "in meters */ uint32 align_Flt[14]; uint32 q_FltPdop; /* 3D position", "log 0x1476 documentation) */ uint16 w_FixEvents; /* Fix events bit", "3: Medium 4: High */ uint8 
u_VerticalReliability; /* Vertical position", "2: Kalman filter 3: Externally injected 4: Internal database */", "carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t", "/* GPS fix time of week of in milliseconds */", "for l in ss.strip().split(\"\\n\"): typ, nam = l.split(\";\")[0].split() #print(typ, nam)", "0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477 LOG_GNSS_CLOCK_REPORT = 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480", "nam[i] == \"_\": ret.append(nam[i+1].upper()) i += 2 else: ret.append(nam[i]) i", "field: (see DM log 0x1476 documentation) */ uint8 u_FailureCode; /*", "number of BeiDou SVs used in the fix */ uint8", "measurement_status; uint8_t misc_status; uint32_t multipath_estimate; float azimuth; float elevation; int32_t", "true North, with increasing angles moving clockwise from North. In", "of meters. */ uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value for", "uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass time bias in", "/* Fix events bit field: (see DM log 0x1476 documentation)", "(clock frequency bias) in meters per second */ uint32 q_FltClockDriftSigmaMps;", "DM log 0x1476 documentation) */ uint32 q_PosVelFlag2; /* Position velocity", "(percentage) associated with the uncertainty ellipse values */ uint32 q_FltEllipseAngle;", "q_SensorDataUsageMask; /* Denotes which additional sensor data were used to", "else: nams.append(nam) return st, nams def dict_unpacker(ss, camelcase = False):", "\"\"\" glonass_measurement_report_sv = \"\"\" uint8_t sv_id; int8_t frequency_index; uint8_t observation_state;", "hemming_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t", "bias) in meters per second */ uint32 q_FltClockDriftSigmaMps; /* Gaussian", "log 0x1476 documentation) */ uint8 u_FailureCode; /* Failure code: (see", "+= 2 else: ret.append(nam[i]) i += 1 return 
''.join(ret) def", "ones not used in position calculation */ uint8 u_NumGloSvsUsed; /*", "= 0x1516 glonass_measurement_report = \"\"\" uint8_t version; uint32_t f_count; uint8_t", "uint8 u_TotalGpsSvs; /* Total number of GPS SVs detected by", "uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of final horizontal position uncertainty", "computed by WLS */ uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value", "uint16 w_Reserved2; /* Reserved memory field */ uint32 q_FltGnssHeadingRad; /*", "position height-above-ellipsoid altitude in meters */ uint8 u_HorizontalReliability; /* Horizontal", "0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886", "in meters */ uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value", "*/ uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value for final position", "st += st[-1]*(cnt-1) for i in range(cnt): nams.append(\"%s[%d]\" % (nam.split(\"[\")[0],", "meters */ uint32 q_FltFiltGGTBMeters; /* Filtered GPS to Glonass time", "float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; \"\"\" glonass_measurement_report_sv = \"\"\"", "(see DM log 0x1476 documentation) */ uint32 _fake_align_week_number; uint16 w_GpsWeekNumber;", "/* Semiminor axis of final horizontal position uncertainty error ellipse.", "*/ uint8 u_TotalGloSvs; /* Total number of Glonass SVs detected", "= 0x14E1 LOG_GNSS_ME_DPO_STATUS = 0x1838 LOG_GNSS_CD_DB_REPORT = 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT =", "uint32 q_Fcount; /* Local millisecond counter */ uint8 u_PosSource; /*", "meters */ uint32 q_FltRawAlt; /* Raw height-above-ellipsoid altitude in meters", "Glonass calendar day in four year cycle */ uint32 q_GloFixTimeMs;", "< len(nam): if nam[i] == \"_\": ret.append(nam[i+1].upper()) i += 2", "uint16_t week; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias;", "reported */ uint64 
t_DblFinalPosLatLon[2]; /* Final latitude and longitude of", "value for clock drift in meters per second */ uint32", "unweighted least-squares covariance matrix */ uint32 q_FltVdop; /* Vertical position", "= [] for l in ss.strip().split(\"\\n\"): typ, nam = l.split(\";\")[0].split()", "BeiDou to Glonass time bias in meters */ uint32 q_FltBGTBSigmaMeters;", "in meters */ uint32 q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou", "\"\"\" def name_to_camelcase(nam): ret = [] i = 0 while", "glonass_number_of_days; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias; float", "time bias in meters */ uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma", "uint32 q_GpsFixTimeMs; /* GPS fix time of week of in", "uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in meters as computed", "sensors defined in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 <96> Heading aided with", "fix time of week of in milliseconds */ uint8 u_GloNumFourYear;", "uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t postdetections;", "Horizontal position dilution of precision as computed from the unweighted", "uncertainty in radians */ uint32 q_FltVelEnuMps[3]; /* User velocity in", "WLS */ uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value for filtered", "Filtered GPS to Glonass time bias in meters */ uint32", "of the confidence (percentage) associated with the uncertainty ellipse values", "*/ uint8 u_FailureCode; /* Failure code: (see DM log 0x1476", "time bias uncertainty in meters */ uint32 q_FltFiltGBTBMeters; /* Filtered", "float elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty;", "drift in meters per second */ uint32 q_FltFilteredAlt; /* Filtered", "azimuth; float elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed; float", "uint8_t 
glonass_cycle_number; uint16_t glonass_number_of_days; uint32_t milliseconds; float time_bias; float clock_time_uncertainty;", "to Glonass time bias in meters */ uint32 q_FltBGTBSigmaMeters; /*", "not used in position calculation */ uint8 u_NumGloSvsUsed; /* The", "meters. */ uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value for final" ]
[ "super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content Browser') self.setProperty('houdiniStyle', True) topLayout = QHBoxLayout() topLayout.setContentsMargins(4,", "value, reset=True): self.defaultValue = value if reset: self.revertToDefault() def mousePressEvent(self,", "except ImportError: from PySide2.QtWidgets import * from PySide2.QtGui import *", "orientation=Qt.Horizontal, parent=None): super(Slider, self).__init__(orientation, parent) self.defaultValue = 0 self.valueLadderMode =", "self).__init__(parent) self.setWindowTitle('Content Browser') self.setProperty('houdiniStyle', True) topLayout = QHBoxLayout() topLayout.setContentsMargins(4, 4,", "self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18)) self.patternSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.patternSearchButton)", "QStackedLayout(middleLayout) model = QFileSystemModel() model.setRootPath('C:/') treeView = BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/'))", "QPushButton() settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18)) settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton)", "hint event = QMouseEvent if not self.valueLadderMode and event.buttons() ==", "PyQt5.QtGui import * from PyQt5.QtCore import * except ImportError: from", "def eventFilter(self, watched, event): if False: # Type hint watched", "by Regular Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18)) self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton)", "setDefaultValue(self, value, reset=True): self.defaultValue = value if reset: self.revertToDefault() def", "Type hint event = QMouseEvent if 
not self.valueLadderMode and event.buttons()", "18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton = QPushButton() self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table", "shortcut='Ctrl+2')) bottomLayout = QHBoxLayout() bottomLayout.setContentsMargins(4, 0, 4, 4) bottomLayout.setSpacing(2) settingsButton", "key == Qt.Key_Minus: pass elif mod == Qt.ControlModifier and key", "self.fuzzySearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton = QPushButton() self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True)", "import createAction def isRevertToDefaultEvent(event): return event.modifiers() == Qt.ControlModifier and event.button()", "= SearchField() self.searchField.setToolTip('Search\\tCtrl+F, F3') topLayout.addWidget(self.searchField) searchModeButtonGroup = QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton", "0, QSizePolicy.Expanding, QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer) self.scaleSlider = Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda", "= QMouseEvent if event.button() == Qt.MiddleButton: return elif event.button() ==", "mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True) def switchToTableView(self): self.viewLayout.setCurrentIndex(1)", "== Qt.Key_Equal: pass elif mod == Qt.ControlModifier and key ==", "Qt.NoModifier and key == Qt.Key_Escape: self.clearEditText() else: super(SearchField, self).keyPressEvent(event) def", "Type hint sep = QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) 
viewModeButtonGroup =", "hidePopup(self): super(SearchField, self).hidePopup() self.lineEdit().setFocus() link_or_state_icon = 'BUTTONS_link' embedded_icon = 'BUTTONS_pinned'", "QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer) self.scaleSlider = Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120,", "self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton) sep = hou.qt.Separator() if False: # Type", "# Type hint watched = QObject event = QEvent if", "if mod == Qt.NoModifier and key == Qt.Key_Escape: self.clearEditText() else:", "class BrowserTreeView(QTreeView): def __init__(self, parent=None): super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True) class BrowserTableView(QListView):", "= False def revertToDefault(self): self.setValue(self.defaultValue) def setDefaultValue(self, value, reset=True): self.defaultValue", "Type hint event = QKeyEvent key = event.key() mod =", "else: self.valueLadderMode = True elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers() &", "self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18)) self.patternSearchButton.setIconSize(QSize(18,", "= QMouseEvent if not self.valueLadderMode and event.buttons() == Qt.MiddleButton: try:", "edit = self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26) comp =", "switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True) def switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True) def", "Type hint watched = QObject 
event = QEvent if watched", "bottomLayout.setContentsMargins(4, 0, 4, 4) bottomLayout.setSpacing(2) settingsButton = QPushButton() settingsButton.setFixedSize(26, 26)", "settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18)) settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton) spacer", "if reset: self.revertToDefault() def mousePressEvent(self, event): if False: # Type", "elif mod == Qt.NoModifier and key == Qt.Key_F3: self.searchField.setFocus() elif", "try: hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int) except hou.OperationFailed: return else: self.valueLadderMode =", "self.setProperty('houdiniStyle', True) topLayout = QHBoxLayout() topLayout.setContentsMargins(4, 4, 4, 2) topLayout.setSpacing(2)", "viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) self.searchField = SearchField() self.searchField.setToolTip('Search\\tCtrl+F, F3') topLayout.addWidget(self.searchField) searchModeButtonGroup", "super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu)", "bottomLayout.setSpacing(2) settingsButton = QPushButton() settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18))", "BrowserTableView(QListView): def __init__(self, parent=None): super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel)", "== 
Qt.NoModifier and key == Qt.Key_Escape: self.clearEditText() else: super(SearchField, self).keyPressEvent(event)", "* from PySide2.QtCore import * import hou from hammer_tools.utils import", "from PySide2.QtCore import * import hou from hammer_tools.utils import createAction", "mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True) def", "False: # Type hint event = QMouseEvent if not self.valueLadderMode", "Qt.MiddleButton, Qt.NoModifier) super(Slider, self).mousePressEvent(event) def mouseMoveEvent(self, event): if False: #", "event.button() == Qt.MiddleButton class Slider(QSlider): def __init__(self, orientation=Qt.Horizontal, parent=None): super(Slider,", "QPushButton() self.treeViewButton.setFixedSize(26, 26) self.treeViewButton.setToolTip('Tree View\\t\\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18)) self.treeViewButton.setIconSize(QSize(18, 18))", "event.button() == Qt.MiddleButton: return elif event.button() == Qt.LeftButton: event =", "* v / 100)) bottomLayout.addWidget(self.scaleSlider) mainLayout = QVBoxLayout(self) mainLayout.setContentsMargins(0, 0,", "v / 100)) bottomLayout.addWidget(self.scaleSlider) mainLayout = QVBoxLayout(self) mainLayout.setContentsMargins(0, 0, 0,", "== '__main__': app = QApplication([]) window = ContentBrowser() window.show() app.exec_()", "return else: self.valueLadderMode = True elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers()", "topLayout.addWidget(self.searchField) searchModeButtonGroup = QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton = QPushButton() self.wholeSearchButton.setFixedSize(26, 26)", "elif mod == Qt.ControlModifier and key == Qt.Key_2: pass elif", "QSizePolicy.Expanding, QSizePolicy.Ignored) 
bottomLayout.addSpacerItem(spacer) self.scaleSlider = Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v:", "Qt.NoModifier and key == Qt.Key_F1: pass else: super(ContentBrowser, self).keyPressEvent(event) if", "Type hint event = QMouseEvent if self.valueLadderMode and event.button() ==", "def setDefaultValue(self, value, reset=True): self.defaultValue = value if reset: self.revertToDefault()", "0, 0, 0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def switchToTreeView(self): self.viewLayout.setCurrentIndex(0)", "mod == Qt.ControlModifier and key == Qt.Key_Equal: pass elif mod", "elif mod == Qt.ControlModifier and key == Qt.Key_F: self.searchField.setFocus() elif", "elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers() & Qt.AltModifier), bool(event.modifiers() & Qt.ShiftModifier))", "import * import hou from hammer_tools.utils import createAction def isRevertToDefaultEvent(event):", "class Slider(QSlider): def __init__(self, orientation=Qt.Horizontal, parent=None): super(Slider, self).__init__(orientation, parent) self.defaultValue", "self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2')) bottomLayout = QHBoxLayout() bottomLayout.setContentsMargins(4,", "Regular Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18)) self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep)", "Qt.Key_F: self.searchField.setFocus() elif mod == Qt.NoModifier and key == Qt.Key_F3:", "0, 0, 4) middleLayout.setSpacing(4) self.viewLayout = QStackedLayout(middleLayout) model = QFileSystemModel()", "import * from PySide2.QtCore import * 
import hou from hammer_tools.utils", "View', self.switchToTableView, shortcut='Ctrl+2')) bottomLayout = QHBoxLayout() bottomLayout.setContentsMargins(4, 0, 4, 4)", "self.fuzzySearchButton = QPushButton() self.fuzzySearchButton.setFixedSize(26, 26) self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith',", "__init__(self, parent=None): super(SearchField, self).__init__(parent) self.setEditable(True) edit = self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self)", "topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) self.searchField = SearchField() self.searchField.setToolTip('Search\\tCtrl+F, F3') topLayout.addWidget(self.searchField) searchModeButtonGroup =", "settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton) spacer = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer)", "elif mod == Qt.NoModifier and key == Qt.Key_F1: pass else:", "from hammer_tools.utils import createAction def isRevertToDefaultEvent(event): return event.modifiers() == Qt.ControlModifier", "= QHBoxLayout() middleLayout.setContentsMargins(4, 0, 0, 4) middleLayout.setSpacing(4) self.viewLayout = QStackedLayout(middleLayout)", "word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18)) self.wholeSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton", "4, 4) bottomLayout.setSpacing(2) settingsButton = QPushButton() settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini',", "event): if False: # Type hint watched = QObject event", "QEvent if watched == self.lineEdit(): if event.type() == QEvent.MouseButtonRelease and", "return event.modifiers() == 
Qt.ControlModifier and event.button() == Qt.MiddleButton class Slider(QSlider):", "middleLayout.setContentsMargins(4, 0, 0, 4) middleLayout.setSpacing(4) self.viewLayout = QStackedLayout(middleLayout) model =", "self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self, 'Table View',", "BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView = BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel())", "if mod == Qt.NoModifier and key == Qt.Key_F5: pass elif", "Type hint event = QMouseEvent if event.button() == Qt.MiddleButton: return", "Qt.Key_Escape: self.clearEditText() else: super(SearchField, self).keyPressEvent(event) def hidePopup(self): super(SearchField, self).hidePopup() self.lineEdit().setFocus()", "event): if False: # Type hint event = QMouseEvent if", "pass elif mod == Qt.ControlModifier and key == Qt.Key_1: pass", "self).mouseReleaseEvent(event) class SearchField(QComboBox): def __init__(self, parent=None): super(SearchField, self).__init__(parent) self.setEditable(True) edit", "self.lineEdit().setFocus() link_or_state_icon = 'BUTTONS_link' embedded_icon = 'BUTTONS_pinned' class BrowserMode(QStandardItemModel): def", "Type hint event = QMouseEvent if isRevertToDefaultEvent(event): self.clearEditText() def eventFilter(self,", "event = QMouseEvent if self.valueLadderMode and event.button() == Qt.MiddleButton: hou.ui.closeValueLadder()", "= self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26) comp = self.completer()", "Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode = False elif isRevertToDefaultEvent(event): 
self.revertToDefault() else: super(Slider,", "mousePressEvent(self, event): if False: # Type hint event = QMouseEvent", "18)) self.fuzzySearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton = QPushButton() self.patternSearchButton.setFixedSize(26, 26)", "18, 18)) self.patternSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) self.regexSearchButton = QPushButton() self.regexSearchButton.setFixedSize(26,", "watched = QObject event = QEvent if watched == self.lineEdit():", "return False def keyPressEvent(self, event): if False: # Type hint", "elif mod == Qt.ControlModifier and key == Qt.Key_Equal: pass elif", "event): if False: # Type hint event = QKeyEvent key", "self.clearEditText() event.accept() return True return False def keyPressEvent(self, event): if", "and key == Qt.Key_F5: pass elif mod == Qt.ControlModifier and", "4) bottomLayout.setSpacing(2) settingsButton = QPushButton() settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18,", "self.treeViewButton.setFixedSize(26, 26) self.treeViewButton.setToolTip('Tree View\\t\\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18)) self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True)", "QEvent.MouseButtonRelease and isRevertToDefaultEvent(event): self.clearEditText() event.accept() return True return False def", "QMouseEvent if isRevertToDefaultEvent(event): self.clearEditText() def eventFilter(self, watched, event): if False:", "18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\\tF1')) middleLayout = QHBoxLayout()", "= QSpacerItem(0, 
0, QSizePolicy.Expanding, QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer) self.scaleSlider = Slider() self.scaleSlider.setDefaultValue(50)", "self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18)) self.fuzzySearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.fuzzySearchButton)", "False: # Type hint sep = QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep)", "revertToDefault(self): self.setValue(self.defaultValue) def setDefaultValue(self, value, reset=True): self.defaultValue = value if", "elif mod == Qt.ControlModifier and key == Qt.Key_Minus: pass elif", "treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView = BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1)", "parent=None): super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True)", "elif mod == Qt.ControlModifier and key == Qt.Key_1: pass elif", "reset=True): self.defaultValue = value if reset: self.revertToDefault() def mousePressEvent(self, event):", "bool(event.modifiers() & Qt.ShiftModifier)) else: super(Slider, self).mouseMoveEvent(event) def mouseReleaseEvent(self, event): if", "= QEvent if watched == self.lineEdit(): if event.type() == QEvent.MouseButtonRelease", "bottomLayout = QHBoxLayout() bottomLayout.setContentsMargins(4, 0, 4, 4) bottomLayout.setSpacing(2) settingsButton =", "from PySide2.QtWidgets import * from PySide2.QtGui import * from PySide2.QtCore", 
"import print_function try: from PyQt5.QtWidgets import * from PyQt5.QtGui import", "self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26) comp = self.completer() comp.setCompletionMode(QCompleter.PopupCompletion)", "import * except ImportError: from PySide2.QtWidgets import * from PySide2.QtGui", "& Qt.AltModifier), bool(event.modifiers() & Qt.ShiftModifier)) else: super(Slider, self).mouseMoveEvent(event) def mouseReleaseEvent(self,", "Qt.ControlModifier and event.button() == Qt.MiddleButton class Slider(QSlider): def __init__(self, orientation=Qt.Horizontal,", "event.globalY(), bool(event.modifiers() & Qt.AltModifier), bool(event.modifiers() & Qt.ShiftModifier)) else: super(Slider, self).mouseMoveEvent(event)", "= comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self, event): if False: # Type", "__init__(self, parent=None): super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content Browser') self.setProperty('houdiniStyle', True) topLayout =", "self.tableViewButton.setCheckable(True) self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) self.searchField = SearchField() self.searchField.setToolTip('Search\\tCtrl+F, F3')", "treeView = BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView = BrowserTableView() tableView.setModel(model)", "Qt.NoModifier and key == Qt.Key_F3: self.searchField.setFocus() elif mod == Qt.ControlModifier", "__future__ import print_function try: from PyQt5.QtWidgets import * from PyQt5.QtGui", "False def keyPressEvent(self, event): if False: # Type hint event", "self.patternSearchButton.setToolTip('Search by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18)) self.patternSearchButton.setIconSize(QSize(18, 18)) 
searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton)", "self.completer() comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup = comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def", "mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True) def switchToTableView(self):", "Qt.NoModifier) super(Slider, self).mousePressEvent(event) def mouseMoveEvent(self, event): if False: # Type", "edit.installEventFilter(self) edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26) comp = self.completer() comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel)", "sep = hou.qt.Separator() if False: # Type hint sep =", "= True elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers() & Qt.AltModifier), bool(event.modifiers()", "comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self, event): if False: # Type hint", "settingsButton = QPushButton() settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18)) settingsButton.setIconSize(QSize(18,", "self.treeViewButton = QPushButton() self.treeViewButton.setFixedSize(26, 26) self.treeViewButton.setToolTip('Tree View\\t\\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18))", "== Qt.Key_F1: pass else: super(ContentBrowser, self).keyPressEvent(event) if __name__ == '__main__':", "PySide2.QtGui import * from PySide2.QtCore import * import hou from", "self.setFixedHeight(26) comp = self.completer() 
comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup =", "from PyQt5.QtWidgets import * from PyQt5.QtGui import * from PyQt5.QtCore", "def mouseMoveEvent(self, event): if False: # Type hint event =", "self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18)) self.wholeSearchButton.setIconSize(QSize(18,", "mouseMoveEvent(self, event): if False: # Type hint event = QMouseEvent", "and event.buttons() == Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int) except hou.OperationFailed:", "QPushButton() self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18))", "and key == Qt.Key_1: pass elif mod == Qt.ControlModifier and", "self.searchField.setToolTip('Search\\tCtrl+F, F3') topLayout.addWidget(self.searchField) searchModeButtonGroup = QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton = QPushButton()", "event = QKeyEvent key = event.key() mod = event.modifiers() if", "def __init__(self, parent=None): super(SearchField, self).__init__(parent) self.setEditable(True) edit = self.lineEdit() edit.setPlaceholderText('Search...')", "self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget):", "False: # Type hint event = QMouseEvent if event.button() ==", "= QObject event = QEvent if 
watched == self.lineEdit(): if", "self.wholeSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton = QPushButton() self.fuzzySearchButton.setFixedSize(26, 26) self.fuzzySearchButton.setCheckable(True)", "hint event = QMouseEvent if self.valueLadderMode and event.button() == Qt.MiddleButton:", "== Qt.Key_Minus: pass elif mod == Qt.ControlModifier and key ==", "sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) viewModeButtonGroup = QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton = QPushButton()", "self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by Regular Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18)) self.regexSearchButton.setIconSize(QSize(18, 18))", "# Type hint event = QMouseEvent if isRevertToDefaultEvent(event): self.clearEditText() def", "QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier) super(Slider, self).mousePressEvent(event) def mouseMoveEvent(self, event):", "18, 18)) self.fuzzySearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton = QPushButton() self.patternSearchButton.setFixedSize(26,", "== Qt.ControlModifier and key == Qt.Key_Minus: pass elif mod ==", "if False: # Type hint event = QMouseEvent if event.button()", "== Qt.ControlModifier and key == Qt.Key_F: self.searchField.setFocus() elif mod ==", "and isRevertToDefaultEvent(event): self.clearEditText() event.accept() return True return False def keyPressEvent(self,", "= QStackedLayout(middleLayout) model = QFileSystemModel() model.setRootPath('C:/') treeView = BrowserTreeView() treeView.setModel(model)", "'BUTTONS_link' embedded_icon = 'BUTTONS_pinned' class BrowserMode(QStandardItemModel): def 
__init__(self): super(BrowserMode, self).__init__()", "self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18)) self.patternSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) self.regexSearchButton = QPushButton()", "def switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True) def switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True)", "from PyQt5.QtGui import * from PyQt5.QtCore import * except ImportError:", "self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget): def __init__(self, parent=None):", "and event.button() == Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode = False elif isRevertToDefaultEvent(event):", "QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton = QPushButton() self.treeViewButton.setFixedSize(26, 26) self.treeViewButton.setToolTip('Tree View\\t\\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree',", "= event.modifiers() if mod == Qt.NoModifier and key == Qt.Key_F5:", "self.treeViewButton.setToolTip('Tree View\\t\\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18)) self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton)", "topLayout.addWidget(self.treeViewButton) self.tableViewButton = QPushButton() self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table View\\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18,", "if False: # Type hint watched = QObject event =", "self).mouseMoveEvent(event) def mouseReleaseEvent(self, 
event): if False: # Type hint event", "def mouseReleaseEvent(self, event): if False: # Type hint event =", "# Type hint event = QKeyEvent key = event.key() mod", "if __name__ == '__main__': app = QApplication([]) window = ContentBrowser()", "if False: # Type hint event = QMouseEvent if self.valueLadderMode", "else: super(Slider, self).mouseReleaseEvent(event) class SearchField(QComboBox): def __init__(self, parent=None): super(SearchField, self).__init__(parent)", "True elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers() & Qt.AltModifier), bool(event.modifiers() &", "self.scaleSlider.hide() self.treeViewButton.setChecked(True) def switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True) def keyPressEvent(self, event):", "model.setRootPath('C:/') treeView = BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView = BrowserTableView()", "__init__(self): super(BrowserMode, self).__init__() class BrowserTreeView(QTreeView): def __init__(self, parent=None): super(BrowserTreeView, self).__init__(parent)", "* from PyQt5.QtGui import * from PyQt5.QtCore import * except", "# Type hint event = QMouseEvent if event.button() == Qt.MiddleButton:", "if isRevertToDefaultEvent(event): self.clearEditText() def eventFilter(self, watched, event): if False: #", "self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget): def __init__(self, parent=None): super(ContentBrowser,", "4, 4, 2) topLayout.setSpacing(2) self.refreshButton = QPushButton() self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\\tF5')", "Qt.Key_Minus: pass elif mod == Qt.ControlModifier and key == Qt.Key_1:", "= event.key() mod = event.modifiers() if mod == Qt.NoModifier and", "treeView.setModel(model) 
treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView = BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView)", "self.valueLadderMode = False elif isRevertToDefaultEvent(event): self.revertToDefault() else: super(Slider, self).mouseReleaseEvent(event) class", "key == Qt.Key_F5: pass elif mod == Qt.ControlModifier and key", "= QMouseEvent if self.valueLadderMode and event.button() == Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode", "event.pos(), Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier) super(Slider, self).mousePressEvent(event) def mouseMoveEvent(self, event): if", "self.valueLadderMode and event.buttons() == Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int) except", "self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True) def keyPressEvent(self, event): if False: # Type", "viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton = QPushButton() self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table View\\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette',", "* import hou from hammer_tools.utils import createAction def isRevertToDefaultEvent(event): return", "hou.ui.closeValueLadder() self.valueLadderMode = False elif isRevertToDefaultEvent(event): self.revertToDefault() else: super(Slider, self).mouseReleaseEvent(event)", "pass elif mod == Qt.ControlModifier and key == Qt.Key_2: pass", "self.clearEditText() def eventFilter(self, watched, event): if False: # Type hint", "= BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) 
self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree", "class BrowserTableView(QListView): def __init__(self, parent=None): super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection)", "self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget): def __init__(self, parent=None): super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content", "comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup = comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self, event): if", "event.modifiers() if mod == Qt.NoModifier and key == Qt.Key_F5: pass", "self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget): def __init__(self,", "Slider(QSlider): def __init__(self, orientation=Qt.Horizontal, parent=None): super(Slider, self).__init__(orientation, parent) self.defaultValue =", "26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by Regular Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18)) self.regexSearchButton.setIconSize(QSize(18,", "QHBoxLayout() bottomLayout.setContentsMargins(4, 0, 4, 4) bottomLayout.setSpacing(2) settingsButton = QPushButton() settingsButton.setFixedSize(26,", "QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) viewModeButtonGroup = QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton =", "def isRevertToDefaultEvent(event): return event.modifiers() == Qt.ControlModifier and event.button() == 
Qt.MiddleButton", "BrowserTreeView(QTreeView): def __init__(self, parent=None): super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True) class BrowserTableView(QListView): def", "QPushButton() self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18))", "tableView.setIconSize(QSize(120, 90) * v / 100)) bottomLayout.addWidget(self.scaleSlider) mainLayout = QVBoxLayout(self)", "QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton = QPushButton() self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word", "18)) topLayout.addWidget(self.refreshButton) sep = hou.qt.Separator() if False: # Type hint", "= 0 self.valueLadderMode = False def revertToDefault(self): self.setValue(self.defaultValue) def setDefaultValue(self,", "mod == Qt.ControlModifier and key == Qt.Key_1: pass elif mod", "26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18)) settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton) spacer =", "viewModeButtonGroup = QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton = QPushButton() self.treeViewButton.setFixedSize(26, 26) self.treeViewButton.setToolTip('Tree", "18, 18)) self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton = QPushButton()", "PySide2.QtWidgets import * from PySide2.QtGui import * from PySide2.QtCore import", "self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18)) self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton) sep = hou.qt.Separator() if", 
"topLayout.addWidget(self.patternSearchButton) self.regexSearchButton = QPushButton() self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by Regular", "PyQt5.QtCore import * except ImportError: from PySide2.QtWidgets import * from", "26) self.tableViewButton.setToolTip('Table View\\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18)) self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True) self.tableViewButton.toggle()", "self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\\tF1')) middleLayout =", "popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self, event): if False: # Type hint event", "4) middleLayout.setSpacing(4) self.viewLayout = QStackedLayout(middleLayout) model = QFileSystemModel() model.setRootPath('C:/') treeView", "self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) * v / 100))", "if not self.valueLadderMode and event.buttons() == Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(), self.setValue,", "if self.valueLadderMode and event.button() == Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode = False", "18, 18)) self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\\tF1'))", "link_or_state_icon = 'BUTTONS_link' embedded_icon = 'BUTTONS_pinned' class BrowserMode(QStandardItemModel): def __init__(self):", "mod == Qt.NoModifier and key == Qt.Key_Escape: self.clearEditText() else: 
super(SearchField,", "self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18)) self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton =", "26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18)) self.patternSearchButton.setIconSize(QSize(18, 18))", "self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18)) self.fuzzySearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton)", "QPushButton() self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table View\\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18)) self.tableViewButton.setIconSize(QSize(18, 18))", "= QKeyEvent key = event.key() mod = event.modifiers() if mod", "== Qt.Key_F: self.searchField.setFocus() elif mod == Qt.NoModifier and key ==", "QVBoxLayout(self) mainLayout.setContentsMargins(0, 0, 0, 0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def", "def mousePressEvent(self, event): if False: # Type hint event =", "Qt.MiddleButton: return elif event.button() == Qt.LeftButton: event = QMouseEvent(QEvent.MouseButtonPress, event.pos(),", "Qt.ControlModifier and key == Qt.Key_Minus: pass elif mod == Qt.ControlModifier", "PyQt5.QtWidgets import * from PyQt5.QtGui import * from PyQt5.QtCore import", "self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18)) self.wholeSearchButton.setIconSize(QSize(18, 18)) 
searchModeButtonGroup.addButton(self.wholeSearchButton)", "ContentBrowser(QWidget): def __init__(self, parent=None): super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content Browser') self.setProperty('houdiniStyle', True)", "False: # Type hint watched = QObject event = QEvent", "26) self.treeViewButton.setToolTip('Tree View\\t\\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18)) self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton)", "mod == Qt.ControlModifier and key == Qt.Key_2: pass elif mod", "topLayout.setSpacing(2) self.refreshButton = QPushButton() self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18))", "elif isRevertToDefaultEvent(event): self.revertToDefault() else: super(Slider, self).mouseReleaseEvent(event) class SearchField(QComboBox): def __init__(self,", "embedded_icon = 'BUTTONS_pinned' class BrowserMode(QStandardItemModel): def __init__(self): super(BrowserMode, self).__init__() class", "from PySide2.QtGui import * from PySide2.QtCore import * import hou", "'Table View', self.switchToTableView, shortcut='Ctrl+2')) bottomLayout = QHBoxLayout() bottomLayout.setContentsMargins(4, 0, 4,", "self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18)) self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton) sep", "keyPressEvent(self, event): if False: # Type hint event = QKeyEvent", "self.setWindowTitle('Content Browser') self.setProperty('houdiniStyle', True) topLayout = QHBoxLayout() topLayout.setContentsMargins(4, 4, 4,", "from PyQt5.QtCore import * except ImportError: from PySide2.QtWidgets import *", "hint sep = QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) viewModeButtonGroup 
= QButtonGroup(self)", "self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) * v / 100)) bottomLayout.addWidget(self.scaleSlider)", "= QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton = QPushButton() self.treeViewButton.setFixedSize(26, 26) self.treeViewButton.setToolTip('Tree View\\t\\tCtrl+1')", "= QPushButton() self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18,", "self.regexSearchButton = QPushButton() self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by Regular Expression')", "View\\t\\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18)) self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton", "self.defaultValue = 0 self.valueLadderMode = False def revertToDefault(self): self.setValue(self.defaultValue) def", "self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18)) self.wholeSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton = QPushButton()", "Qt.NoModifier and key == Qt.Key_F5: pass elif mod == Qt.ControlModifier", "searchModeButtonGroup.setExclusive(True) self.wholeSearchButton = QPushButton() self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word search')", "def __init__(self, orientation=Qt.Horizontal, parent=None): super(Slider, self).__init__(orientation, parent) self.defaultValue = 0", "class SearchField(QComboBox): def __init__(self, parent=None): 
super(SearchField, self).__init__(parent) self.setEditable(True) edit =", "comp = self.completer() comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup = comp.popup()", "isRevertToDefaultEvent(event): self.clearEditText() event.accept() return True return False def keyPressEvent(self, event):", "mod == Qt.NoModifier and key == Qt.Key_F5: pass elif mod", "isRevertToDefaultEvent(event): self.revertToDefault() else: super(Slider, self).mouseReleaseEvent(event) class SearchField(QComboBox): def __init__(self, parent=None):", "Qt.AltModifier), bool(event.modifiers() & Qt.ShiftModifier)) else: super(Slider, self).mouseMoveEvent(event) def mouseReleaseEvent(self, event):", "Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) * v /", "self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget): def __init__(self, parent=None): super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content Browser')", "event = QMouseEvent if isRevertToDefaultEvent(event): self.clearEditText() def eventFilter(self, watched, event):", "topLayout.addWidget(sep) viewModeButtonGroup = QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton = QPushButton() self.treeViewButton.setFixedSize(26, 26)", "False: # Type hint event = QMouseEvent if isRevertToDefaultEvent(event): self.clearEditText()", "18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton = QPushButton() self.fuzzySearchButton.setFixedSize(26, 26) self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle()", "__init__(self, parent=None): super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) 
self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90))", "== Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int) except hou.OperationFailed: return else:", "search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18)) self.wholeSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton =", "self.viewLayout = QStackedLayout(middleLayout) model = QFileSystemModel() model.setRootPath('C:/') treeView = BrowserTreeView()", "self).__init__(parent) self.setEditable(True) edit = self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26)", "edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26) comp = self.completer() comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains)", "self.refreshButton.setToolTip('Update\\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18)) self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton) sep = hou.qt.Separator()", "Qt.ControlModifier and key == Qt.Key_1: pass elif mod == Qt.ControlModifier", "self.patternSearchButton = QPushButton() self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha',", "key == Qt.Key_Equal: pass elif mod == Qt.ControlModifier and key", "self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True) self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) 
self.searchField = SearchField()", "class BrowserMode(QStandardItemModel): def __init__(self): super(BrowserMode, self).__init__() class BrowserTreeView(QTreeView): def __init__(self,", "v: tableView.setIconSize(QSize(120, 90) * v / 100)) bottomLayout.addWidget(self.scaleSlider) mainLayout =", "hou from hammer_tools.utils import createAction def isRevertToDefaultEvent(event): return event.modifiers() ==", "self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self,", "def __init__(self, parent=None): super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120,", "topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\\tF1')) middleLayout = QHBoxLayout() middleLayout.setContentsMargins(4, 0, 0, 4)", "super(SearchField, self).__init__(parent) self.setEditable(True) edit = self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe UI'))", "key == Qt.Key_F1: pass else: super(ContentBrowser, self).keyPressEvent(event) if __name__ ==", "QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer) self.scaleSlider = Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120)", "self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) self.searchField = SearchField() self.searchField.setToolTip('Search\\tCtrl+F, F3') topLayout.addWidget(self.searchField)", "PySide2.QtCore import * import hou from 
hammer_tools.utils import createAction def", "parent=None): super(SearchField, self).__init__(parent) self.setEditable(True) edit = self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe", "False: # Type hint event = QMouseEvent if self.valueLadderMode and", "26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18)) self.wholeSearchButton.setIconSize(QSize(18, 18))", "popup = comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self, event): if False: #", "False: # Type hint event = QKeyEvent key = event.key()", "tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView)", "def __init__(self): super(BrowserMode, self).__init__() class BrowserTreeView(QTreeView): def __init__(self, parent=None): super(BrowserTreeView,", "return elif event.button() == Qt.LeftButton: event = QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton,", "and key == Qt.Key_Minus: pass elif mod == Qt.ControlModifier and", "else: super(SearchField, self).keyPressEvent(event) def hidePopup(self): super(SearchField, self).hidePopup() self.lineEdit().setFocus() link_or_state_icon =", "__init__(self, orientation=Qt.Horizontal, parent=None): super(Slider, self).__init__(orientation, parent) self.defaultValue = 0 self.valueLadderMode", "QMouseEvent if not self.valueLadderMode and event.buttons() == Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(),", "QPushButton() self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by Regular Expression') 
self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18,", "Help\\tF1')) middleLayout = QHBoxLayout() middleLayout.setContentsMargins(4, 0, 0, 4) middleLayout.setSpacing(4) self.viewLayout", "self.clearEditText() else: super(SearchField, self).keyPressEvent(event) def hidePopup(self): super(SearchField, self).hidePopup() self.lineEdit().setFocus() link_or_state_icon", "QHBoxLayout() middleLayout.setContentsMargins(4, 0, 0, 4) middleLayout.setSpacing(4) self.viewLayout = QStackedLayout(middleLayout) model", "event.buttons() == Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int) except hou.OperationFailed: return", "self.wholeSearchButton = QPushButton() self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase',", "key = event.key() mod = event.modifiers() if mod == Qt.NoModifier", "self).keyPressEvent(event) if __name__ == '__main__': app = QApplication([]) window =", "by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18)) self.patternSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) self.regexSearchButton", "and key == Qt.Key_F: self.searchField.setFocus() elif mod == Qt.NoModifier and", "edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26) comp = self.completer() comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5)", "viewModeButtonGroup.setExclusive(True) self.treeViewButton = QPushButton() self.treeViewButton.setFixedSize(26, 26) self.treeViewButton.setToolTip('Tree View\\t\\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18,", "False elif isRevertToDefaultEvent(event): self.revertToDefault() 
else: super(Slider, self).mouseReleaseEvent(event) class SearchField(QComboBox): def", "* from PyQt5.QtCore import * except ImportError: from PySide2.QtWidgets import", "UI')) self.setFixedHeight(26) comp = self.completer() comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup", "100)) bottomLayout.addWidget(self.scaleSlider) mainLayout = QVBoxLayout(self) mainLayout.setContentsMargins(0, 0, 0, 0) mainLayout.setSpacing(4)", "= QPushButton() self.fuzzySearchButton.setFixedSize(26, 26) self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18,", "mouseReleaseEvent(self, event): if False: # Type hint event = QMouseEvent", "= Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) * v", "Qt.Key_1: pass elif mod == Qt.ControlModifier and key == Qt.Key_2:", "self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2')) bottomLayout =", "event.key() mod = event.modifiers() if mod == Qt.NoModifier and key", "= QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier) super(Slider, self).mousePressEvent(event) def mouseMoveEvent(self,", "self.setEditable(True) edit = self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26) comp", "and key == Qt.Key_2: pass elif mod == Qt.NoModifier and", "pass elif mod == Qt.NoModifier and key == Qt.Key_F1: pass", "self.defaultValue = value if reset: self.revertToDefault() def mousePressEvent(self, event): if", "True) topLayout 
= QHBoxLayout() topLayout.setContentsMargins(4, 4, 4, 2) topLayout.setSpacing(2) self.refreshButton", "Qt.Key_Equal: pass elif mod == Qt.ControlModifier and key == Qt.Key_Minus:", "return True return False def keyPressEvent(self, event): if False: #", "topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton = QPushButton() self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by Pattern')", "Qt.LeftButton: event = QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier) super(Slider, self).mousePressEvent(event)", "super(SearchField, self).keyPressEvent(event) def hidePopup(self): super(SearchField, self).hidePopup() self.lineEdit().setFocus() link_or_state_icon = 'BUTTONS_link'", "self.valueLadderMode = True elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers() & Qt.AltModifier),", "pass elif mod == Qt.ControlModifier and key == Qt.Key_Minus: pass", "__name__ == '__main__': app = QApplication([]) window = ContentBrowser() window.show()", "and key == Qt.Key_Escape: self.clearEditText() else: super(SearchField, self).keyPressEvent(event) def hidePopup(self):", "18)) self.wholeSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton = QPushButton() self.fuzzySearchButton.setFixedSize(26, 26)", "26) self.refreshButton.setToolTip('Update\\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18)) self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton) sep =", "from __future__ import print_function try: from PyQt5.QtWidgets import * from", "QPushButton() self.fuzzySearchButton.setFixedSize(26, 26) self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search') 
self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18))", "'BUTTONS_pinned' class BrowserMode(QStandardItemModel): def __init__(self): super(BrowserMode, self).__init__() class BrowserTreeView(QTreeView): def", "value if reset: self.revertToDefault() def mousePressEvent(self, event): if False: #", "== Qt.ControlModifier and key == Qt.Key_Equal: pass elif mod ==", "event.modifiers() if mod == Qt.NoModifier and key == Qt.Key_Escape: self.clearEditText()", "self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton = QPushButton() self.tableViewButton.setFixedSize(26, 26)", "= QPushButton() self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18,", "= False elif isRevertToDefaultEvent(event): self.revertToDefault() else: super(Slider, self).mouseReleaseEvent(event) class SearchField(QComboBox):", "hammer_tools.utils import createAction def isRevertToDefaultEvent(event): return event.modifiers() == Qt.ControlModifier and", "searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton = QPushButton() self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by", "18, 18)) self.wholeSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton = QPushButton() self.fuzzySearchButton.setFixedSize(26,", "Qt.MiddleButton class Slider(QSlider): def __init__(self, orientation=Qt.Horizontal, parent=None): super(Slider, self).__init__(orientation, parent)", "= QPushButton() self.treeViewButton.setFixedSize(26, 26) 
self.treeViewButton.setToolTip('Tree View\\t\\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18)) self.treeViewButton.setIconSize(QSize(18,", "= BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView = BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/'))", "middleLayout.setSpacing(4) self.viewLayout = QStackedLayout(middleLayout) model = QFileSystemModel() model.setRootPath('C:/') treeView =", "and key == Qt.Key_F1: pass else: super(ContentBrowser, self).keyPressEvent(event) if __name__", "= QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton = QPushButton() self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole", "self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self, 'Table", "super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True) class BrowserTableView(QListView): def __init__(self, parent=None): super(BrowserTableView, self).__init__(parent)", "= QHBoxLayout() topLayout.setContentsMargins(4, 4, 4, 2) topLayout.setSpacing(2) self.refreshButton = QPushButton()", "18)) self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\\tF1')) middleLayout", "else: super(ContentBrowser, self).keyPressEvent(event) if __name__ == '__main__': app = QApplication([])", "search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18)) self.fuzzySearchButton.setIconSize(QSize(18, 18)) 
searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton =", "try: from PyQt5.QtWidgets import * from PyQt5.QtGui import * from", "import * from PyQt5.QtCore import * except ImportError: from PySide2.QtWidgets", "'Tree View', self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2'))", "super(Slider, self).mouseReleaseEvent(event) class SearchField(QComboBox): def __init__(self, parent=None): super(SearchField, self).__init__(parent) self.setEditable(True)", "sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) viewModeButtonGroup = QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton = QPushButton() self.treeViewButton.setFixedSize(26,", "key == Qt.Key_2: pass elif mod == Qt.NoModifier and key", "F3') topLayout.addWidget(self.searchField) searchModeButtonGroup = QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton = QPushButton() self.wholeSearchButton.setFixedSize(26,", "super(Slider, self).mousePressEvent(event) def mouseMoveEvent(self, event): if False: # Type hint", "Qt.Key_2: pass elif mod == Qt.NoModifier and key == Qt.Key_F1:", "self).hidePopup() self.lineEdit().setFocus() link_or_state_icon = 'BUTTONS_link' embedded_icon = 'BUTTONS_pinned' class BrowserMode(QStandardItemModel):", "self.setValue, data_type=hou.valueLadderDataType.Int) except hou.OperationFailed: return else: self.valueLadderMode = True elif", "== Qt.Key_2: pass elif mod == Qt.NoModifier and key ==", "self).__init__(orientation, parent) self.defaultValue = 0 self.valueLadderMode = False def revertToDefault(self):", "hint event = QKeyEvent key = event.key() mod = event.modifiers()", "import hou from hammer_tools.utils import createAction def isRevertToDefaultEvent(event): return event.modifiers()", "18)) 
self.patternSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) self.regexSearchButton = QPushButton() self.regexSearchButton.setFixedSize(26, 26)", "mod == Qt.NoModifier and key == Qt.Key_F3: self.searchField.setFocus() elif mod", "topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton = QPushButton() self.fuzzySearchButton.setFixedSize(26, 26) self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search')", "def __init__(self, parent=None): super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content Browser') self.setProperty('houdiniStyle', True) topLayout", "self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by Regular Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18))", "== Qt.ControlModifier and key == Qt.Key_2: pass elif mod ==", "super(Slider, self).__init__(orientation, parent) self.defaultValue = 0 self.valueLadderMode = False def", "self.switchToTableView, shortcut='Ctrl+2')) bottomLayout = QHBoxLayout() bottomLayout.setContentsMargins(4, 0, 4, 4) bottomLayout.setSpacing(2)", "& Qt.ShiftModifier)) else: super(Slider, self).mouseMoveEvent(event) def mouseReleaseEvent(self, event): if False:", "self.searchField.setFocus() elif mod == Qt.ControlModifier and key == Qt.Key_Equal: pass", "Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier) super(Slider, self).mousePressEvent(event) def mouseMoveEvent(self, event): if False:", "self.regexSearchButton.setToolTip('Search by Regular Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18)) self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton)", "key == Qt.Key_F3: self.searchField.setFocus() elif mod == Qt.ControlModifier and key", "hint event = QMouseEvent 
if isRevertToDefaultEvent(event): self.clearEditText() def eventFilter(self, watched,", "* from PySide2.QtGui import * from PySide2.QtCore import * import", "parent=None): super(Slider, self).__init__(orientation, parent) self.defaultValue = 0 self.valueLadderMode = False", "self.searchField.setFocus() elif mod == Qt.NoModifier and key == Qt.Key_F3: self.searchField.setFocus()", "= QVBoxLayout(self) mainLayout.setContentsMargins(0, 0, 0, 0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout)", "BrowserMode(QStandardItemModel): def __init__(self): super(BrowserMode, self).__init__() class BrowserTreeView(QTreeView): def __init__(self, parent=None):", "18)) searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton = QPushButton() self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search", "Qt.ControlModifier and key == Qt.Key_Equal: pass elif mod == Qt.ControlModifier", "18, 18)) settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton) spacer = QSpacerItem(0, 0, QSizePolicy.Expanding,", "== Qt.ControlModifier and key == Qt.Key_1: pass elif mod ==", "* except ImportError: from PySide2.QtWidgets import * from PySide2.QtGui import", "self).keyPressEvent(event) def hidePopup(self): super(SearchField, self).hidePopup() self.lineEdit().setFocus() link_or_state_icon = 'BUTTONS_link' embedded_icon", "key == Qt.Key_F: self.searchField.setFocus() elif mod == Qt.NoModifier and key", "# Type hint sep = QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) viewModeButtonGroup", "QFileSystemModel() model.setRootPath('C:/') treeView = BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView =", "and key == Qt.Key_F3: 
self.searchField.setFocus() elif mod == Qt.ControlModifier and", "super(SearchField, self).hidePopup() self.lineEdit().setFocus() link_or_state_icon = 'BUTTONS_link' embedded_icon = 'BUTTONS_pinned' class", "event = QMouseEvent if event.button() == Qt.MiddleButton: return elif event.button()", "event = QMouseEvent if not self.valueLadderMode and event.buttons() == Qt.MiddleButton:", "tableView = BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self,", "spacer = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer) self.scaleSlider = Slider()", "searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) self.regexSearchButton = QPushButton() self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by", "self.fuzzySearchButton.setToolTip('Fuzzy search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18)) self.fuzzySearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton", "hint watched = QObject event = QEvent if watched ==", "super(Slider, self).mouseMoveEvent(event) def mouseReleaseEvent(self, event): if False: # Type hint", "= hou.qt.Separator() if False: # Type hint sep = QFrame", "topLayout.addWidget(self.refreshButton) sep = hou.qt.Separator() if False: # Type hint sep", "topLayout.addWidget(sep) self.searchField = SearchField() self.searchField.setToolTip('Search\\tCtrl+F, F3') topLayout.addWidget(self.searchField) searchModeButtonGroup = QButtonGroup(self)", "isRevertToDefaultEvent(event): return event.modifiers() == 
Qt.ControlModifier and event.button() == Qt.MiddleButton class", "event.button() == Qt.LeftButton: event = QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier)", "# Type hint event = QMouseEvent if self.valueLadderMode and event.button()", "SearchField(QComboBox): def __init__(self, parent=None): super(SearchField, self).__init__(parent) self.setEditable(True) edit = self.lineEdit()", "topLayout = QHBoxLayout() topLayout.setContentsMargins(4, 4, 4, 2) topLayout.setSpacing(2) self.refreshButton =", "Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int) except hou.OperationFailed: return else: self.valueLadderMode", "eventFilter(self, watched, event): if False: # Type hint watched =", "= value if reset: self.revertToDefault() def mousePressEvent(self, event): if False:", "SearchField() self.searchField.setToolTip('Search\\tCtrl+F, F3') topLayout.addWidget(self.searchField) searchModeButtonGroup = QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton =", "def __init__(self, parent=None): super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True) class BrowserTableView(QListView): def __init__(self,", "self.setValue(self.defaultValue) def setDefaultValue(self, value, reset=True): self.defaultValue = value if reset:", "def keyPressEvent(self, event): if False: # Type hint event =", "comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup = comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self,", "= QPushButton() self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table View\\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18)) self.tableViewButton.setIconSize(QSize(18,", "if event.type() == 
QEvent.MouseButtonRelease and isRevertToDefaultEvent(event): self.clearEditText() event.accept() return True", "Qt.ControlModifier and key == Qt.Key_2: pass elif mod == Qt.NoModifier", "self.patternSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) self.regexSearchButton = QPushButton() self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True)", "18)) settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton) spacer = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored)", "== Qt.Key_1: pass elif mod == Qt.ControlModifier and key ==", "self.valueLadderMode and event.button() == Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode = False elif", "18)) self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton) sep = hou.qt.Separator() if False: #", "Qt.ShiftModifier)) else: super(Slider, self).mouseMoveEvent(event) def mouseReleaseEvent(self, event): if False: #", "self.revertToDefault() else: super(Slider, self).mouseReleaseEvent(event) class SearchField(QComboBox): def __init__(self, parent=None): super(SearchField,", "self).__init__(parent) self.setAlternatingRowColors(True) class BrowserTableView(QListView): def __init__(self, parent=None): super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode)", "watched, event): if False: # Type hint watched = QObject", "if False: # Type hint event = QMouseEvent if isRevertToDefaultEvent(event):", "= QHBoxLayout() bottomLayout.setContentsMargins(4, 0, 4, 4) bottomLayout.setSpacing(2) settingsButton = QPushButton()", "ImportError: from PySide2.QtWidgets import * from PySide2.QtGui import * from", "print_function try: from PyQt5.QtWidgets import * from PyQt5.QtGui import *", "class ContentBrowser(QWidget): def __init__(self, parent=None): super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content 
Browser') self.setProperty('houdiniStyle',", "super(ContentBrowser, self).keyPressEvent(event) if __name__ == '__main__': app = QApplication([]) window", "hou.qt.Separator() if False: # Type hint sep = QFrame sep.setFixedWidth(2)", "Qt.ControlModifier and key == Qt.Key_F: self.searchField.setFocus() elif mod == Qt.NoModifier", "and key == Qt.Key_Equal: pass elif mod == Qt.ControlModifier and", "90) * v / 100)) bottomLayout.addWidget(self.scaleSlider) mainLayout = QVBoxLayout(self) mainLayout.setContentsMargins(0,", "Qt.Key_F5: pass elif mod == Qt.ControlModifier and key == Qt.Key_F:", "= QPushButton() settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18)) settingsButton.setIconSize(QSize(18, 18))", "self.viewLayout.addWidget(treeView) tableView = BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView)", "QKeyEvent key = event.key() mod = event.modifiers() if mod ==", "self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) * v / 100)) bottomLayout.addWidget(self.scaleSlider) mainLayout", "model = QFileSystemModel() model.setRootPath('C:/') treeView = BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView)", "if event.button() == Qt.MiddleButton: return elif event.button() == Qt.LeftButton: event", "18)) self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton = QPushButton() self.tableViewButton.setFixedSize(26,", "18)) searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) 
self.regexSearchButton = QPushButton() self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search", "= 'BUTTONS_link' embedded_icon = 'BUTTONS_pinned' class BrowserMode(QStandardItemModel): def __init__(self): super(BrowserMode,", "def hidePopup(self): super(SearchField, self).hidePopup() self.lineEdit().setFocus() link_or_state_icon = 'BUTTONS_link' embedded_icon =", "= QPushButton() self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18)) self.refreshButton.setIconSize(QSize(18, 18))", "self).__init__() class BrowserTreeView(QTreeView): def __init__(self, parent=None): super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True) class", "sep = QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) viewModeButtonGroup = QButtonGroup(self) viewModeButtonGroup.setExclusive(True)", "self.treeViewButton.setChecked(True) def switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True) def keyPressEvent(self, event): if", "topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\\tF1')) middleLayout = QHBoxLayout() middleLayout.setContentsMargins(4, 0, 0,", "== Qt.MiddleButton: return elif event.button() == Qt.LeftButton: event = QMouseEvent(QEvent.MouseButtonPress,", "== Qt.Key_Escape: self.clearEditText() else: super(SearchField, self).keyPressEvent(event) def hidePopup(self): super(SearchField, self).hidePopup()", "self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18)) self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True) self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep)", "== Qt.Key_F3: 
self.searchField.setFocus() elif mod == Qt.ControlModifier and key ==", "== Qt.NoModifier and key == Qt.Key_F3: self.searchField.setFocus() elif mod ==", "parent) self.defaultValue = 0 self.valueLadderMode = False def revertToDefault(self): self.setValue(self.defaultValue)", "switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True) def keyPressEvent(self, event): if False: #", "bool(event.modifiers() & Qt.AltModifier), bool(event.modifiers() & Qt.ShiftModifier)) else: super(Slider, self).mouseMoveEvent(event) def", "key == Qt.Key_Escape: self.clearEditText() else: super(SearchField, self).keyPressEvent(event) def hidePopup(self): super(SearchField,", "18)) bottomLayout.addWidget(settingsButton) spacer = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer) self.scaleSlider", "QMouseEvent if event.button() == Qt.MiddleButton: return elif event.button() == Qt.LeftButton:", "else: super(Slider, self).mouseMoveEvent(event) def mouseReleaseEvent(self, event): if False: # Type", "bottomLayout.addWidget(self.scaleSlider) mainLayout = QVBoxLayout(self) mainLayout.setContentsMargins(0, 0, 0, 0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout)", "BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree View',", "Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18)) self.patternSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) self.regexSearchButton =", "# Type hint event = QMouseEvent if not self.valueLadderMode and", "== QEvent.MouseButtonRelease and isRevertToDefaultEvent(event): 
self.clearEditText() event.accept() return True return False", "searchModeButtonGroup = QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton = QPushButton() self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True)", "0, 4) middleLayout.setSpacing(4) self.viewLayout = QStackedLayout(middleLayout) model = QFileSystemModel() model.setRootPath('C:/')", "searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton = QPushButton() self.fuzzySearchButton.setFixedSize(26, 26) self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy", "event.accept() return True return False def keyPressEvent(self, event): if False:", "comp.setMaxVisibleItems(5) popup = comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self, event): if False:", "key == Qt.Key_1: pass elif mod == Qt.ControlModifier and key", "= QPushButton() self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by Regular Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match',", "4, 2) topLayout.setSpacing(2) self.refreshButton = QPushButton() self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload',", "0, 4, 4) bottomLayout.setSpacing(2) settingsButton = QPushButton() settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings')", "self.scaleSlider.show() self.tableViewButton.setChecked(True) def keyPressEvent(self, event): if False: # Type hint", "def switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True) def keyPressEvent(self, event): if False:", "18, 18)) self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True) 
self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) self.searchField", "settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18)) settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton) spacer = QSpacerItem(0,", "comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup = comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self, event):", "Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18)) self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser',", "QMouseEvent if self.valueLadderMode and event.button() == Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode =", "mod = event.modifiers() if mod == Qt.NoModifier and key ==", "QHBoxLayout() topLayout.setContentsMargins(4, 4, 4, 2) topLayout.setSpacing(2) self.refreshButton = QPushButton() self.refreshButton.setFixedSize(26,", "== Qt.LeftButton: event = QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier) super(Slider,", "def revertToDefault(self): self.setValue(self.defaultValue) def setDefaultValue(self, value, reset=True): self.defaultValue = value", "createAction def isRevertToDefaultEvent(event): return event.modifiers() == Qt.ControlModifier and event.button() ==", "0 self.valueLadderMode = False def revertToDefault(self): self.setValue(self.defaultValue) def setDefaultValue(self, value,", "__init__(self, parent=None): super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True) class BrowserTableView(QListView): def __init__(self, parent=None):", "True return False 
def keyPressEvent(self, event): if False: # Type", "QPushButton() self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18)) self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton)", "= QFileSystemModel() model.setRootPath('C:/') treeView = BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView", "= QMouseEvent if isRevertToDefaultEvent(event): self.clearEditText() def eventFilter(self, watched, event): if", "mod == Qt.ControlModifier and key == Qt.Key_F: self.searchField.setFocus() elif mod", "parent=None): super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content Browser') self.setProperty('houdiniStyle', True) topLayout = QHBoxLayout()", "searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\\tF1')) middleLayout = QHBoxLayout() middleLayout.setContentsMargins(4,", "if watched == self.lineEdit(): if event.type() == QEvent.MouseButtonRelease and isRevertToDefaultEvent(event):", "hou.OperationFailed: return else: self.valueLadderMode = True elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(),", "event.button() == Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode = False elif isRevertToDefaultEvent(event): self.revertToDefault()", "self.valueLadderMode = False def revertToDefault(self): self.setValue(self.defaultValue) def setDefaultValue(self, value, reset=True):", "self.refreshButton = QPushButton() self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18)) self.refreshButton.setIconSize(QSize(18,", "self.fuzzySearchButton.setFixedSize(26, 26) 
self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18)) self.fuzzySearchButton.setIconSize(QSize(18,", "pass else: super(ContentBrowser, self).keyPressEvent(event) if __name__ == '__main__': app =", "self.wholeSearchButton.setToolTip('Whole word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18)) self.wholeSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton)", "View', self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2')) bottomLayout", "mod == Qt.ControlModifier and key == Qt.Key_Minus: pass elif mod", "self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers() & Qt.AltModifier), bool(event.modifiers() & Qt.ShiftModifier)) else:", "2) topLayout.setSpacing(2) self.refreshButton = QPushButton() self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18,", "'Show Help\\tF1')) middleLayout = QHBoxLayout() middleLayout.setContentsMargins(4, 0, 0, 4) middleLayout.setSpacing(4)", "import * from PyQt5.QtGui import * from PyQt5.QtCore import *", "if False: # Type hint sep = QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine)", "18)) self.tableViewButton.setCheckable(True) self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) self.searchField = SearchField() self.searchField.setToolTip('Search\\tCtrl+F,", "if False: # Type hint event = QMouseEvent if not", "shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) 
self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2')) bottomLayout = QHBoxLayout()", "self.tableViewButton.setToolTip('Table View\\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18)) self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True) self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton)", "90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget): def __init__(self, parent=None): super(ContentBrowser, self).__init__(parent)", "except hou.OperationFailed: return else: self.valueLadderMode = True elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(),", "hint event = QMouseEvent if event.button() == Qt.MiddleButton: return elif", "parent=None): super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True) class BrowserTableView(QListView): def __init__(self, parent=None): super(BrowserTableView,", "self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18)) self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show", "self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class", "self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table View\\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18)) self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True)", "== Qt.NoModifier and key == Qt.Key_F5: pass 
elif mod ==", "self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2')) bottomLayout = QHBoxLayout() bottomLayout.setContentsMargins(4, 0,", "18, 18)) self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton) sep = hou.qt.Separator() if False:", "== Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode = False elif isRevertToDefaultEvent(event): self.revertToDefault() else:", "= 'BUTTONS_pinned' class BrowserMode(QStandardItemModel): def __init__(self): super(BrowserMode, self).__init__() class BrowserTreeView(QTreeView):", "== Qt.ControlModifier and event.button() == Qt.MiddleButton class Slider(QSlider): def __init__(self,", "tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree View', self.switchToTreeView,", "0, 0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide()", "= event.modifiers() if mod == Qt.NoModifier and key == Qt.Key_Escape:", "self.searchField = SearchField() self.searchField.setToolTip('Search\\tCtrl+F, F3') topLayout.addWidget(self.searchField) searchModeButtonGroup = QButtonGroup(self) searchModeButtonGroup.setExclusive(True)", "hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int) except hou.OperationFailed: return else: self.valueLadderMode = True", "event.type() == QEvent.MouseButtonRelease and isRevertToDefaultEvent(event): self.clearEditText() event.accept() return True return", "mainLayout.addLayout(bottomLayout) def switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True) 
def switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show()", "bottomLayout.addWidget(settingsButton) spacer = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer) self.scaleSlider =", "self.scaleSlider = Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) *", "0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True)", "reset: self.revertToDefault() def mousePressEvent(self, event): if False: # Type hint", "pass elif mod == Qt.ControlModifier and key == Qt.Key_F: self.searchField.setFocus()", "== Qt.Key_F5: pass elif mod == Qt.ControlModifier and key ==", "QObject event = QEvent if watched == self.lineEdit(): if event.type()", "topLayout.setContentsMargins(4, 4, 4, 2) topLayout.setSpacing(2) self.refreshButton = QPushButton() self.refreshButton.setFixedSize(26, 26)", "topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\\tF1')) middleLayout = QHBoxLayout() middleLayout.setContentsMargins(4, 0,", "if False: # Type hint event = QKeyEvent key =", "self.tableViewButton = QPushButton() self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table View\\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18))", "tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1'))", "self).mousePressEvent(event) 
def mouseMoveEvent(self, event): if False: # Type hint event", "= QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) viewModeButtonGroup = QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton", "= self.completer() comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup = comp.popup() popup.setStyleSheet(hou.qt.styleSheet())", "middleLayout = QHBoxLayout() middleLayout.setContentsMargins(4, 0, 0, 4) middleLayout.setSpacing(4) self.viewLayout =", "26) self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18)) self.fuzzySearchButton.setIconSize(QSize(18, 18))", "== self.lineEdit(): if event.type() == QEvent.MouseButtonRelease and isRevertToDefaultEvent(event): self.clearEditText() event.accept()", "Qt.Key_F3: self.searchField.setFocus() elif mod == Qt.ControlModifier and key == Qt.Key_Equal:", "event.modifiers() == Qt.ControlModifier and event.button() == Qt.MiddleButton class Slider(QSlider): def", "bottomLayout.addSpacerItem(spacer) self.scaleSlider = Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90)", "mod == Qt.NoModifier and key == Qt.Key_F1: pass else: super(ContentBrowser,", "self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton = QPushButton() self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table View\\tCtrl+2')", "settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18)) settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton) spacer = QSpacerItem(0, 0,", "False def 
revertToDefault(self): self.setValue(self.defaultValue) def setDefaultValue(self, value, reset=True): self.defaultValue =", "self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True) def switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True) def keyPressEvent(self,", "self.lineEdit(): if event.type() == QEvent.MouseButtonRelease and isRevertToDefaultEvent(event): self.clearEditText() event.accept() return", "event = QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier) super(Slider, self).mousePressEvent(event) def", "super(BrowserMode, self).__init__() class BrowserTreeView(QTreeView): def __init__(self, parent=None): super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True)", "not self.valueLadderMode and event.buttons() == Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int)", "self.tableViewButton.setChecked(True) def keyPressEvent(self, event): if False: # Type hint event", "self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18)) self.fuzzySearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton = QPushButton()", "data_type=hou.valueLadderDataType.Int) except hou.OperationFailed: return else: self.valueLadderMode = True elif self.valueLadderMode:", "elif event.button() == Qt.LeftButton: event = QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton, Qt.MiddleButton,", "== Qt.MiddleButton class Slider(QSlider): def __init__(self, orientation=Qt.Horizontal, parent=None): super(Slider, self).__init__(orientation,", "mainLayout.setContentsMargins(0, 0, 0, 0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def 
switchToTreeView(self):", "self.setAlternatingRowColors(True) class BrowserTableView(QListView): def __init__(self, parent=None): super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust)", "import * from PySide2.QtGui import * from PySide2.QtCore import *", "event = QEvent if watched == self.lineEdit(): if event.type() ==", "/ 100)) bottomLayout.addWidget(self.scaleSlider) mainLayout = QVBoxLayout(self) mainLayout.setContentsMargins(0, 0, 0, 0)", "Qt.Key_F1: pass else: super(ContentBrowser, self).keyPressEvent(event) if __name__ == '__main__': app", "== Qt.NoModifier and key == Qt.Key_F1: pass else: super(ContentBrowser, self).keyPressEvent(event)", "Browser') self.setProperty('houdiniStyle', True) topLayout = QHBoxLayout() topLayout.setContentsMargins(4, 4, 4, 2)", "isRevertToDefaultEvent(event): self.clearEditText() def eventFilter(self, watched, event): if False: # Type", "hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers() & Qt.AltModifier), bool(event.modifiers() & Qt.ShiftModifier)) else: super(Slider,", "self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self, 'Table View', self.switchToTableView,", "self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget): def", "watched == self.lineEdit(): if event.type() == QEvent.MouseButtonRelease and isRevertToDefaultEvent(event): self.clearEditText()", "mainLayout = QVBoxLayout(self) mainLayout.setContentsMargins(0, 0, 0, 0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout)", "View\\tCtrl+2') 
self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18)) self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True) self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton)", "18)) self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True) self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) self.searchField =", "and event.button() == Qt.MiddleButton class Slider(QSlider): def __init__(self, orientation=Qt.Horizontal, parent=None):", "self.revertToDefault() def mousePressEvent(self, event): if False: # Type hint event" ]
[ "main(): ip_port = ('192.168.3.188', 8888) if len(sys.argv) < 2: port", "') #loop = loop -1 #recv_data = udp_socket.recvfrom(1024) #print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8'))", "= udp_socket.recvfrom(1024) #print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8')) #print('.', end=' ') #data = recv_data.decode('utf-8')", "> 0: #loop = 10 #while loop > 0: for", "end=' ') #data = recv_data.decode('utf-8') #print('0x%x'%data) cnt = cnt -", "#recv_data = udp_socket.recvfrom(1024) #print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8')) #print('.', end=' ') #data =", "loop = 4 print(\"send %d...\", cnt*loop) # 3. 接收发送的数据 while", "print(\"send %d...\", cnt*loop) # 3. 接收发送的数据 while cnt > 0:", "in range(0, loop): udp_socket.sendto(smsg, ip_port) print('.', end=' ') #loop =", "#data = recv_data.decode('utf-8') #print('0x%x'%data) cnt = cnt - 1 time.sleep(0.005)", "# 2. 绑定本地信息 udp_socket.bind(('', port)) cnt = 100 loop =", "import sys smsg = b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee' def main(): ip_port = ('192.168.3.188',", "UDP sender \"\"\" import socket import time import sys smsg", "loop): udp_socket.sendto(smsg, ip_port) print('.', end=' ') #loop = loop -1", "1 time.sleep(0.005) print(\"\") print(\"finished\") # 7. 关闭套接字 udp_socket.close() print(\"close\") if", "8888) if len(sys.argv) < 2: port = 8888 else: port", "- 1 time.sleep(0.005) print(\"\") print(\"finished\") # 7. 关闭套接字 udp_socket.close() print(\"close\")", "#!/usr/bin/python3 \"\"\" UDP sender \"\"\" import socket import time import", "= 4 print(\"send %d...\", cnt*loop) # 3. 接收发送的数据 while cnt", "= cnt - 1 time.sleep(0.005) print(\"\") print(\"finished\") # 7. 关闭套接字", "10 #while loop > 0: for i in range(0, loop):", "#print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8')) #print('.', end=' ') #data = recv_data.decode('utf-8') #print('0x%x'%data) cnt", "3. 
接收发送的数据 while cnt > 0: #loop = 10 #while", "def main(): ip_port = ('192.168.3.188', 8888) if len(sys.argv) < 2:", "= int(sys.argv[1]) # 1. 创建 udp 套接字 udp_socket = socket.socket(socket.AF_INET,", "int(sys.argv[1]) # 1. 创建 udp 套接字 udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)", "sys smsg = b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee' def main(): ip_port = ('192.168.3.188', 8888)", "else: port = int(sys.argv[1]) # 1. 创建 udp 套接字 udp_socket", "for i in range(0, loop): udp_socket.sendto(smsg, ip_port) print('.', end=' ')", "# 3. 接收发送的数据 while cnt > 0: #loop = 10", "sender \"\"\" import socket import time import sys smsg =", "\"\"\" UDP sender \"\"\" import socket import time import sys", "#print('0x%x'%data) cnt = cnt - 1 time.sleep(0.005) print(\"\") print(\"finished\") #", "1. 创建 udp 套接字 udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 2.", "100 loop = 4 print(\"send %d...\", cnt*loop) # 3. 接收发送的数据", "4 print(\"send %d...\", cnt*loop) # 3. 接收发送的数据 while cnt >", "print(\"finished\") # 7. 关闭套接字 udp_socket.close() print(\"close\") if __name__ == '__main__':", "cnt - 1 time.sleep(0.005) print(\"\") print(\"finished\") # 7. 关闭套接字 udp_socket.close()", "') #data = recv_data.decode('utf-8') #print('0x%x'%data) cnt = cnt - 1", "= loop -1 #recv_data = udp_socket.recvfrom(1024) #print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8')) #print('.', end='", "while cnt > 0: #loop = 10 #while loop >", "len(sys.argv) < 2: port = 8888 else: port = int(sys.argv[1])", "socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 2. 绑定本地信息 udp_socket.bind(('', port)) cnt = 100", "= ('192.168.3.188', 8888) if len(sys.argv) < 2: port = 8888", "= 100 loop = 4 print(\"send %d...\", cnt*loop) # 3.", "# 1. 
创建 udp 套接字 udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) #", "0: #loop = 10 #while loop > 0: for i", "loop > 0: for i in range(0, loop): udp_socket.sendto(smsg, ip_port)", "\"\"\" import socket import time import sys smsg = b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee'", "('192.168.3.188', 8888) if len(sys.argv) < 2: port = 8888 else:", "range(0, loop): udp_socket.sendto(smsg, ip_port) print('.', end=' ') #loop = loop", "创建 udp 套接字 udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 2. 绑定本地信息", "port = int(sys.argv[1]) # 1. 创建 udp 套接字 udp_socket =", "recv_data.decode('utf-8') #print('0x%x'%data) cnt = cnt - 1 time.sleep(0.005) print(\"\") print(\"finished\")", "time import sys smsg = b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee' def main(): ip_port =", "= 10 #while loop > 0: for i in range(0,", "ip_port = ('192.168.3.188', 8888) if len(sys.argv) < 2: port =", "import socket import time import sys smsg = b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee' def", "= 8888 else: port = int(sys.argv[1]) # 1. 创建 udp", "udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 2. 绑定本地信息 udp_socket.bind(('', port)) cnt", "# 7. 关闭套接字 udp_socket.close() print(\"close\") if __name__ == '__main__': main()", "-1 #recv_data = udp_socket.recvfrom(1024) #print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8')) #print('.', end=' ') #data", "smsg = b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee' def main(): ip_port = ('192.168.3.188', 8888) if", "udp 套接字 udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 2. 绑定本地信息 udp_socket.bind(('',", "8888 else: port = int(sys.argv[1]) # 1. 
创建 udp 套接字", "= b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee' def main(): ip_port = ('192.168.3.188', 8888) if len(sys.argv)", "i in range(0, loop): udp_socket.sendto(smsg, ip_port) print('.', end=' ') #loop", "print('.', end=' ') #loop = loop -1 #recv_data = udp_socket.recvfrom(1024)", "import time import sys smsg = b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee' def main(): ip_port", "= socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 2. 绑定本地信息 udp_socket.bind(('', port)) cnt =", "2. 绑定本地信息 udp_socket.bind(('', port)) cnt = 100 loop = 4", "if len(sys.argv) < 2: port = 8888 else: port =", "> 0: for i in range(0, loop): udp_socket.sendto(smsg, ip_port) print('.',", "2: port = 8888 else: port = int(sys.argv[1]) # 1.", "print(\"\") print(\"finished\") # 7. 关闭套接字 udp_socket.close() print(\"close\") if __name__ ==", "port)) cnt = 100 loop = 4 print(\"send %d...\", cnt*loop)", "cnt = cnt - 1 time.sleep(0.005) print(\"\") print(\"finished\") # 7.", "loop -1 #recv_data = udp_socket.recvfrom(1024) #print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8')) #print('.', end=' ')", "#while loop > 0: for i in range(0, loop): udp_socket.sendto(smsg,", "绑定本地信息 udp_socket.bind(('', port)) cnt = 100 loop = 4 print(\"send", "套接字 udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 2. 绑定本地信息 udp_socket.bind(('', port))", "socket.SOCK_DGRAM) # 2. 绑定本地信息 udp_socket.bind(('', port)) cnt = 100 loop", "#print(recv_data.decode('utf-8')) #print('.', end=' ') #data = recv_data.decode('utf-8') #print('0x%x'%data) cnt =", "#loop = 10 #while loop > 0: for i in", "接收发送的数据 while cnt > 0: #loop = 10 #while loop", "%d...\", cnt*loop) # 3. 
接收发送的数据 while cnt > 0: #loop", "cnt > 0: #loop = 10 #while loop > 0:", "udp_socket.bind(('', port)) cnt = 100 loop = 4 print(\"send %d...\",", "udp_socket.sendto(smsg, ip_port) print('.', end=' ') #loop = loop -1 #recv_data", "0: for i in range(0, loop): udp_socket.sendto(smsg, ip_port) print('.', end='", "< 2: port = 8888 else: port = int(sys.argv[1]) #", "udp_socket.recvfrom(1024) #print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8')) #print('.', end=' ') #data = recv_data.decode('utf-8') #print('0x%x'%data)", "cnt = 100 loop = 4 print(\"send %d...\", cnt*loop) #", "ip_port) print('.', end=' ') #loop = loop -1 #recv_data =", "time.sleep(0.005) print(\"\") print(\"finished\") # 7. 关闭套接字 udp_socket.close() print(\"close\") if __name__", "end=' ') #loop = loop -1 #recv_data = udp_socket.recvfrom(1024) #print(recv_data.decode('gbk'))", "= recv_data.decode('utf-8') #print('0x%x'%data) cnt = cnt - 1 time.sleep(0.005) print(\"\")", "socket import time import sys smsg = b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee' def main():", "port = 8888 else: port = int(sys.argv[1]) # 1. 创建", "#loop = loop -1 #recv_data = udp_socket.recvfrom(1024) #print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8')) #print('.',", "cnt*loop) # 3. 接收发送的数据 while cnt > 0: #loop =", "b'\\xaa\\x08\\xfe\\x00\\xc9\\xe6\\x5f\\xee' def main(): ip_port = ('192.168.3.188', 8888) if len(sys.argv) <", "#print('.', end=' ') #data = recv_data.decode('utf-8') #print('0x%x'%data) cnt = cnt" ]
[ "_get_json(self): return self.data def __repr__(self): return ( f\"<Role id={self.id} name={self.name}>\"", "__init__(self, data): self.data = data self._update(data) def _get_json(self): return self.data", "guild_id(self): return self._guild_id @property def mentionable(self): return self._mentionable @property def", "( f\"{self.name}\" ) def _update(self, data): self._id = data[\"id\"] self._color", "def mentionable(self): return self._mentionable @property def position(self): return self._position @property", "f\"{self.name}\" ) def _update(self, data): self._id = data[\"id\"] self._color =", "def __init__(self, data): self.data = data self._update(data) def _get_json(self): return", "class Role: def __init__(self, data): self.data = data self._update(data) def", "self._color = data[\"color\"] self._managed = data[\"managed\"] self._name = data[\"name\"] self._guild_id", "def __str__(self): return ( f\"{self.name}\" ) def _update(self, data): self._id", "data[\"managed\"] self._name = data[\"name\"] self._guild_id = data[\"guild_id\"] self._mentionable = data[\"mentionable\"]", "data[\"potition\"] self._hoisted = data[\"hoisted\"] @property def id(self): return self._id @property", ") def _update(self, data): self._id = data[\"id\"] self._color = data[\"color\"]", "self._guild_id = data[\"guild_id\"] self._mentionable = data[\"mentionable\"] self._position = data[\"potition\"] self._hoisted", "@property def id(self): return self._id @property def color(self): return self._color", "__all__ = ( \"Role\", ) class Role: def __init__(self, data):", "def __repr__(self): return ( f\"<Role id={self.id} name={self.name}>\" ) def __str__(self):", "id(self): return self._id @property def color(self): return self._color @property def", ") class Role: def __init__(self, data): self.data = data self._update(data)", "@property def mentionable(self): return self._mentionable @property def position(self): return self._position", "= data[\"guild_id\"] self._mentionable = 
data[\"mentionable\"] self._position = data[\"potition\"] self._hoisted =", "\"Role\", ) class Role: def __init__(self, data): self.data = data", "def guild_id(self): return self._guild_id @property def mentionable(self): return self._mentionable @property", "= ( \"Role\", ) class Role: def __init__(self, data): self.data", "self._managed @property def name(self): return self._name @property def guild_id(self): return", "def managed(self): return self._managed @property def name(self): return self._name @property", "= data[\"potition\"] self._hoisted = data[\"hoisted\"] @property def id(self): return self._id", "= data[\"hoisted\"] @property def id(self): return self._id @property def color(self):", "= data self._update(data) def _get_json(self): return self.data def __repr__(self): return", "data[\"mentionable\"] self._position = data[\"potition\"] self._hoisted = data[\"hoisted\"] @property def id(self):", "<gh_stars>1-10 __all__ = ( \"Role\", ) class Role: def __init__(self,", "= data[\"color\"] self._managed = data[\"managed\"] self._name = data[\"name\"] self._guild_id =", "@property def color(self): return self._color @property def managed(self): return self._managed", "self._position = data[\"potition\"] self._hoisted = data[\"hoisted\"] @property def id(self): return", "self._color @property def managed(self): return self._managed @property def name(self): return", "return self._id @property def color(self): return self._color @property def managed(self):", "color(self): return self._color @property def managed(self): return self._managed @property def", "self._hoisted = data[\"hoisted\"] @property def id(self): return self._id @property def", "_update(self, data): self._id = data[\"id\"] self._color = data[\"color\"] self._managed =", "self._update(data) def _get_json(self): return self.data def __repr__(self): return ( f\"<Role", "return self.data def __repr__(self): return ( f\"<Role id={self.id} name={self.name}>\" )", "name={self.name}>\" ) def 
__str__(self): return ( f\"{self.name}\" ) def _update(self,", "= data[\"name\"] self._guild_id = data[\"guild_id\"] self._mentionable = data[\"mentionable\"] self._position =", "def name(self): return self._name @property def guild_id(self): return self._guild_id @property", "self.data = data self._update(data) def _get_json(self): return self.data def __repr__(self):", "name(self): return self._name @property def guild_id(self): return self._guild_id @property def", "def color(self): return self._color @property def managed(self): return self._managed @property", "data[\"guild_id\"] self._mentionable = data[\"mentionable\"] self._position = data[\"potition\"] self._hoisted = data[\"hoisted\"]", "return self._name @property def guild_id(self): return self._guild_id @property def mentionable(self):", "def id(self): return self._id @property def color(self): return self._color @property", "return ( f\"<Role id={self.id} name={self.name}>\" ) def __str__(self): return (", "self._id @property def color(self): return self._color @property def managed(self): return", "managed(self): return self._managed @property def name(self): return self._name @property def", "f\"<Role id={self.id} name={self.name}>\" ) def __str__(self): return ( f\"{self.name}\" )", "@property def position(self): return self._position @property def hoisted(self): return self._hoisted", "return self._managed @property def name(self): return self._name @property def guild_id(self):", "@property def guild_id(self): return self._guild_id @property def mentionable(self): return self._mentionable", "self._name @property def guild_id(self): return self._guild_id @property def mentionable(self): return", "return self._guild_id @property def mentionable(self): return self._mentionable @property def position(self):", "return ( f\"{self.name}\" ) def _update(self, data): self._id = data[\"id\"]", "data[\"hoisted\"] @property def id(self): return self._id @property def color(self): return", "@property def 
name(self): return self._name @property def guild_id(self): return self._guild_id", "def _update(self, data): self._id = data[\"id\"] self._color = data[\"color\"] self._managed", "data[\"id\"] self._color = data[\"color\"] self._managed = data[\"managed\"] self._name = data[\"name\"]", "@property def managed(self): return self._managed @property def name(self): return self._name", "( \"Role\", ) class Role: def __init__(self, data): self.data =", "data): self.data = data self._update(data) def _get_json(self): return self.data def", "def _get_json(self): return self.data def __repr__(self): return ( f\"<Role id={self.id}", "Role: def __init__(self, data): self.data = data self._update(data) def _get_json(self):", "data[\"name\"] self._guild_id = data[\"guild_id\"] self._mentionable = data[\"mentionable\"] self._position = data[\"potition\"]", "self._managed = data[\"managed\"] self._name = data[\"name\"] self._guild_id = data[\"guild_id\"] self._mentionable", "self._mentionable = data[\"mentionable\"] self._position = data[\"potition\"] self._hoisted = data[\"hoisted\"] @property", "self._name = data[\"name\"] self._guild_id = data[\"guild_id\"] self._mentionable = data[\"mentionable\"] self._position", "__repr__(self): return ( f\"<Role id={self.id} name={self.name}>\" ) def __str__(self): return", "( f\"<Role id={self.id} name={self.name}>\" ) def __str__(self): return ( f\"{self.name}\"", "id={self.id} name={self.name}>\" ) def __str__(self): return ( f\"{self.name}\" ) def", "self._id = data[\"id\"] self._color = data[\"color\"] self._managed = data[\"managed\"] self._name", "data): self._id = data[\"id\"] self._color = data[\"color\"] self._managed = data[\"managed\"]", "= data[\"managed\"] self._name = data[\"name\"] self._guild_id = data[\"guild_id\"] self._mentionable =", "return self._color @property def managed(self): return self._managed @property def name(self):", "= data[\"id\"] self._color = data[\"color\"] self._managed = data[\"managed\"] 
self._name =", "__str__(self): return ( f\"{self.name}\" ) def _update(self, data): self._id =", ") def __str__(self): return ( f\"{self.name}\" ) def _update(self, data):", "return self._mentionable @property def position(self): return self._position @property def hoisted(self):", "data[\"color\"] self._managed = data[\"managed\"] self._name = data[\"name\"] self._guild_id = data[\"guild_id\"]", "= data[\"mentionable\"] self._position = data[\"potition\"] self._hoisted = data[\"hoisted\"] @property def", "mentionable(self): return self._mentionable @property def position(self): return self._position @property def", "data self._update(data) def _get_json(self): return self.data def __repr__(self): return (", "self._guild_id @property def mentionable(self): return self._mentionable @property def position(self): return", "self._mentionable @property def position(self): return self._position @property def hoisted(self): return", "self.data def __repr__(self): return ( f\"<Role id={self.id} name={self.name}>\" ) def" ]
[ "import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf import settings from django.conf.urls.static import static", "2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) \"\"\" from", "import include, path 2. Add a URL to urlpatterns: path('blog/',", "routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post', PostViewSet, base_name='post') router.register(r'comment', CommentViewSet,", "path('', Home.as_view(), name='home') Including another URLconf 1. Import the include()", "2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including", "TagViewSet, base_name='tags') router.register(r'category', CategoryViewSet, base_name='category') from django.contrib import admin from", "admin from django.urls import path urlpatterns = [ path('admin/', admin.site.urls),", "base_name='category') from django.contrib import admin from django.urls import path urlpatterns", "include() function: from django.urls import include, path 2. Add a", "base_name='post') router.register(r'comment', CommentViewSet, base_name='comment') router.register(r'tags', TagViewSet, base_name='tags') router.register(r'category', CategoryViewSet, base_name='category')", "routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/", "Add an import: from my_app import views 2. 
Add a", "URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1.", "to urlpatterns: path('blog/', include('blog.urls')) \"\"\" from django.conf.urls import url, include", "path('admin/', admin.site.urls), url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ] urlpatterns.extend( static(settings.STATIC_URL,", "Add a URL to urlpatterns: path('', views.home, name='home') Class-based views", "to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import", "\"\"\"project URL Configuration The `urlpatterns` list routes URLs to views.", "1. Add an import: from other_app.views import Home 2. Add", "base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post', PostViewSet, base_name='post') router.register(r'comment', CommentViewSet, base_name='comment') router.register(r'tags',", "admin.site.urls), url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ] urlpatterns.extend( static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)", "an import: from other_app.views import Home 2. Add a URL", "views. For more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views", "blog import views from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf import", "URL to urlpatterns: path('blog/', include('blog.urls')) \"\"\" from django.conf.urls import url,", "router.register(r'post', PostViewSet, base_name='post') router.register(r'comment', CommentViewSet, base_name='comment') router.register(r'tags', TagViewSet, base_name='tags') router.register(r'category',", "Home.as_view(), name='home') Including another URLconf 1. 
Import the include() function:", "import settings from django.conf.urls.static import static router = routers.DefaultRouter() router.register(r'hide',HideViewSet,", "from django.contrib import admin from django.urls import path urlpatterns =", "name='home') Class-based views 1. Add an import: from other_app.views import", "function: from django.urls import include, path 2. Add a URL", "import views 2. Add a URL to urlpatterns: path('', views.home,", "django.urls import path urlpatterns = [ path('admin/', admin.site.urls), url(r'^', include(router.urls)),", "base_name='draft') router.register(r'post', PostViewSet, base_name='post') router.register(r'comment', CommentViewSet, base_name='comment') router.register(r'tags', TagViewSet, base_name='tags')", "\"\"\" from django.conf.urls import url, include from rest_framework import routers", "Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')", "The `urlpatterns` list routes URLs to views. For more information", "Add an import: from other_app.views import Home 2. Add a", "import Home 2. Add a URL to urlpatterns: path('', Home.as_view(),", "from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf import settings from django.conf.urls.static", "include('rest_framework.urls', namespace='rest_framework')) ] urlpatterns.extend( static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) )", "router = routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post', PostViewSet, base_name='post')", "views 1. Add an import: from other_app.views import Home 2.", "urlpatterns: path('blog/', include('blog.urls')) \"\"\" from django.conf.urls import url, include from", "include, path 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls'))", "include from rest_framework import routers from blog import views from", "= routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post', PostViewSet, base_name='post') router.register(r'comment',", "path('blog/', include('blog.urls')) \"\"\" from django.conf.urls import url, include from rest_framework", "PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf import settings from django.conf.urls.static import static router", "other_app.views import Home 2. Add a URL to urlpatterns: path('',", "import path urlpatterns = [ path('admin/', admin.site.urls), url(r'^', include(router.urls)), url(r'^api-auth/',", "base_name='comment') router.register(r'tags', TagViewSet, base_name='tags') router.register(r'category', CategoryViewSet, base_name='category') from django.contrib import", "list routes URLs to views. For more information please see:", "Class-based views 1. Add an import: from other_app.views import Home", "Examples: Function views 1. Add an import: from my_app import", "the include() function: from django.urls import include, path 2. Add", "blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf import settings from django.conf.urls.static import", "django.conf.urls import url, include from rest_framework import routers from blog", "path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) \"\"\"", "from django.conf import settings from django.conf.urls.static import static router =", "include('blog.urls')) \"\"\" from django.conf.urls import url, include from rest_framework import", "see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1. 
Add an import: from", "[ path('admin/', admin.site.urls), url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ] urlpatterns.extend(", "URLs to views. For more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples:", "router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post', PostViewSet, base_name='post') router.register(r'comment', CommentViewSet, base_name='comment') router.register(r'tags', TagViewSet,", "settings from django.conf.urls.static import static router = routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn')", "to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an", "router.register(r'comment', CommentViewSet, base_name='comment') router.register(r'tags', TagViewSet, base_name='tags') router.register(r'category', CategoryViewSet, base_name='category') from", "import: from other_app.views import Home 2. Add a URL to", "1. Add an import: from my_app import views 2. Add", "urlpatterns = [ path('admin/', admin.site.urls), url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))", "from my_app import views 2. 
Add a URL to urlpatterns:", "from django.conf.urls import url, include from rest_framework import routers from", "url, include from rest_framework import routers from blog import views", "django.conf.urls.static import static router = routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft')", "= [ path('admin/', admin.site.urls), url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ]", "from django.conf.urls.static import static router = routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet,", "import: from my_app import views 2. Add a URL to", "import routers from blog import views from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet", "from blog import views from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf", "Import the include() function: from django.urls import include, path 2.", "URL Configuration The `urlpatterns` list routes URLs to views. For", "For more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1.", "url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ] urlpatterns.extend( static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) +", "1. Import the include() function: from django.urls import include, path", "another URLconf 1. Import the include() function: from django.urls import", "views 2. 
Add a URL to urlpatterns: path('', views.home, name='home')", "CommentViewSet, base_name='comment') router.register(r'tags', TagViewSet, base_name='tags') router.register(r'category', CategoryViewSet, base_name='category') from django.contrib", "import url, include from rest_framework import routers from blog import", "url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ] urlpatterns.extend( static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)", "from django.urls import include, path 2. Add a URL to", "my_app import views 2. Add a URL to urlpatterns: path('',", "information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1. Add an", "urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import:", "a URL to urlpatterns: path('', views.home, name='home') Class-based views 1.", "django.urls import include, path 2. Add a URL to urlpatterns:", "name='home') Including another URLconf 1. Import the include() function: from", "from other_app.views import Home 2. Add a URL to urlpatterns:", "path('', views.home, name='home') Class-based views 1. Add an import: from", "more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1. Add", "Add a URL to urlpatterns: path('blog/', include('blog.urls')) \"\"\" from django.conf.urls", "static router = routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post', PostViewSet,", "URLconf 1. 
Import the include() function: from django.urls import include,", "path urlpatterns = [ path('admin/', admin.site.urls), url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls',", "import admin from django.urls import path urlpatterns = [ path('admin/',", "PostViewSet, base_name='post') router.register(r'comment', CommentViewSet, base_name='comment') router.register(r'tags', TagViewSet, base_name='tags') router.register(r'category', CategoryViewSet,", "Configuration The `urlpatterns` list routes URLs to views. For more", "router.register(r'category', CategoryViewSet, base_name='category') from django.contrib import admin from django.urls import", "router.register(r'tags', TagViewSet, base_name='tags') router.register(r'category', CategoryViewSet, base_name='category') from django.contrib import admin", "URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add", "import views from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf import settings", "views 1. Add an import: from my_app import views 2.", "base_name='tags') router.register(r'category', CategoryViewSet, base_name='category') from django.contrib import admin from django.urls", "Including another URLconf 1. Import the include() function: from django.urls", "Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another", "2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based", "Function views 1. 
Add an import: from my_app import views", "router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post', PostViewSet, base_name='post') router.register(r'comment', CommentViewSet, base_name='comment')", "include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ] urlpatterns.extend( static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL,", "https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1. Add an import: from my_app", "please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1. Add an import:", "`urlpatterns` list routes URLs to views. For more information please", "to views. For more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function", "an import: from my_app import views 2. Add a URL", "import static router = routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post',", "views.home, name='home') Class-based views 1. Add an import: from other_app.views", "a URL to urlpatterns: path('blog/', include('blog.urls')) \"\"\" from django.conf.urls import", "a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf", "django.contrib import admin from django.urls import path urlpatterns = [", "urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. 
Import the", "rest_framework import routers from blog import views from blog.views import", "routers from blog import views from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from", "views from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf import settings from", "from rest_framework import routers from blog import views from blog.views", "CategoryViewSet, base_name='category') from django.contrib import admin from django.urls import path", "django.conf import settings from django.conf.urls.static import static router = routers.DefaultRouter()", "from django.urls import path urlpatterns = [ path('admin/', admin.site.urls), url(r'^'," ]
[]
[ "self.variable.assign(value) if self.verbose > 0: print(f'\\nEpoch {epoch + 1}: VariableScheduler", "indexed from 0) and current variable value as input and", "super(VariableScheduler, self).__init__() self.variable = variable self.schedule = schedule self.verbose =", "to the variable as output. verbose: int. 0: quiet, 1:", "and current variable value as input and returns a new", "schedule self.verbose = verbose def on_epoch_begin(self, epoch, logs=None): value =", "the variable as output. verbose: int. 0: quiet, 1: update", "1: update messages. \"\"\" def __init__(self, variable, schedule, verbose=0): super(VariableScheduler,", "training. Arguments: variable: The variable to modify the value of.", "self.variable.read_value() value = self.schedule(epoch, value) self.variable.assign(value) if self.verbose > 0:", "and returns a new value to assign to the variable", "as output. verbose: int. 0: quiet, 1: update messages. \"\"\"", "\"\"\" def __init__(self, variable, schedule, verbose=0): super(VariableScheduler, self).__init__() self.variable =", "value) self.variable.assign(value) if self.verbose > 0: print(f'\\nEpoch {epoch + 1}:", "variable to modify the value of. schedule: A function that", "an epoch index (integer, indexed from 0) and current variable", "class VariableScheduler(tf.keras.callbacks.Callback): \"\"\"Schedules an arbitary variable during training. Arguments: variable:", "variable, schedule, verbose=0): super(VariableScheduler, self).__init__() self.variable = variable self.schedule =", "messages. 
\"\"\" def __init__(self, variable, schedule, verbose=0): super(VariableScheduler, self).__init__() self.variable", "> 0: print(f'\\nEpoch {epoch + 1}: VariableScheduler assigning ' f'variable", "__init__(self, variable, schedule, verbose=0): super(VariableScheduler, self).__init__() self.variable = variable self.schedule", "self.schedule(epoch, value) self.variable.assign(value) if self.verbose > 0: print(f'\\nEpoch {epoch +", "tf class VariableScheduler(tf.keras.callbacks.Callback): \"\"\"Schedules an arbitary variable during training. Arguments:", "self.verbose = verbose def on_epoch_begin(self, epoch, logs=None): value = self.variable.read_value()", "as input and returns a new value to assign to", "tensorflow as tf class VariableScheduler(tf.keras.callbacks.Callback): \"\"\"Schedules an arbitary variable during", "output. verbose: int. 0: quiet, 1: update messages. \"\"\" def", "an arbitary variable during training. Arguments: variable: The variable to", "The variable to modify the value of. schedule: A function", "new value to assign to the variable as output. verbose:", "def on_epoch_begin(self, epoch, logs=None): value = self.variable.read_value() value = self.schedule(epoch,", "variable as output. verbose: int. 0: quiet, 1: update messages.", "A function that takes an epoch index (integer, indexed from", "= verbose def on_epoch_begin(self, epoch, logs=None): value = self.variable.read_value() value", "int. 0: quiet, 1: update messages. \"\"\" def __init__(self, variable,", "returns a new value to assign to the variable as", "def __init__(self, variable, schedule, verbose=0): super(VariableScheduler, self).__init__() self.variable = variable", "arbitary variable during training. Arguments: variable: The variable to modify", "input and returns a new value to assign to the", "modify the value of. 
schedule: A function that takes an", "value as input and returns a new value to assign", "a new value to assign to the variable as output.", "value to assign to the variable as output. verbose: int.", "quiet, 1: update messages. \"\"\" def __init__(self, variable, schedule, verbose=0):", "import tensorflow as tf class VariableScheduler(tf.keras.callbacks.Callback): \"\"\"Schedules an arbitary variable", "update messages. \"\"\" def __init__(self, variable, schedule, verbose=0): super(VariableScheduler, self).__init__()", "assign to the variable as output. verbose: int. 0: quiet,", "verbose: int. 0: quiet, 1: update messages. \"\"\" def __init__(self,", "from 0) and current variable value as input and returns", "{epoch + 1}: VariableScheduler assigning ' f'variable {self.variable.name} to {value}.')", "variable: The variable to modify the value of. schedule: A", "verbose def on_epoch_begin(self, epoch, logs=None): value = self.variable.read_value() value =", "schedule: A function that takes an epoch index (integer, indexed", "value = self.variable.read_value() value = self.schedule(epoch, value) self.variable.assign(value) if self.verbose", "logs=None): value = self.variable.read_value() value = self.schedule(epoch, value) self.variable.assign(value) if", "variable during training. Arguments: variable: The variable to modify the", "the value of. schedule: A function that takes an epoch", "self.verbose > 0: print(f'\\nEpoch {epoch + 1}: VariableScheduler assigning '", "of. schedule: A function that takes an epoch index (integer,", "to assign to the variable as output. verbose: int. 
0:", "function that takes an epoch index (integer, indexed from 0)", "self.schedule = schedule self.verbose = verbose def on_epoch_begin(self, epoch, logs=None):", "= self.schedule(epoch, value) self.variable.assign(value) if self.verbose > 0: print(f'\\nEpoch {epoch", "(integer, indexed from 0) and current variable value as input", "if self.verbose > 0: print(f'\\nEpoch {epoch + 1}: VariableScheduler assigning", "<reponame>Andy-Wilkinson/ChemMLToolk import tensorflow as tf class VariableScheduler(tf.keras.callbacks.Callback): \"\"\"Schedules an arbitary", "value = self.schedule(epoch, value) self.variable.assign(value) if self.verbose > 0: print(f'\\nEpoch", "takes an epoch index (integer, indexed from 0) and current", "self).__init__() self.variable = variable self.schedule = schedule self.verbose = verbose", "schedule, verbose=0): super(VariableScheduler, self).__init__() self.variable = variable self.schedule = schedule", "current variable value as input and returns a new value", "= self.variable.read_value() value = self.schedule(epoch, value) self.variable.assign(value) if self.verbose >", "on_epoch_begin(self, epoch, logs=None): value = self.variable.read_value() value = self.schedule(epoch, value)", "as tf class VariableScheduler(tf.keras.callbacks.Callback): \"\"\"Schedules an arbitary variable during training.", "verbose=0): super(VariableScheduler, self).__init__() self.variable = variable self.schedule = schedule self.verbose", "during training. Arguments: variable: The variable to modify the value", "epoch index (integer, indexed from 0) and current variable value", "= variable self.schedule = schedule self.verbose = verbose def on_epoch_begin(self,", "self.variable = variable self.schedule = schedule self.verbose = verbose def", "\"\"\"Schedules an arbitary variable during training. Arguments: variable: The variable", "print(f'\\nEpoch {epoch + 1}: VariableScheduler assigning ' f'variable {self.variable.name} to", "to modify the value of. 
schedule: A function that takes", "index (integer, indexed from 0) and current variable value as", "Arguments: variable: The variable to modify the value of. schedule:", "VariableScheduler(tf.keras.callbacks.Callback): \"\"\"Schedules an arbitary variable during training. Arguments: variable: The", "0: quiet, 1: update messages. \"\"\" def __init__(self, variable, schedule,", "value of. schedule: A function that takes an epoch index", "= schedule self.verbose = verbose def on_epoch_begin(self, epoch, logs=None): value", "variable value as input and returns a new value to", "variable self.schedule = schedule self.verbose = verbose def on_epoch_begin(self, epoch,", "epoch, logs=None): value = self.variable.read_value() value = self.schedule(epoch, value) self.variable.assign(value)", "that takes an epoch index (integer, indexed from 0) and", "0) and current variable value as input and returns a", "0: print(f'\\nEpoch {epoch + 1}: VariableScheduler assigning ' f'variable {self.variable.name}" ]
[ "#peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s += '%.1f\\t%.1f\\t%.1f\\t' % ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s += '\\t'.join([str(x)", "fileset.get_file(type).fetch( #reference = jp.chrom, start = start, #end = jp.center+JoinedPeak.WIDTH/2):", "+= plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count( #reference = jp.chrom, #start", "peak in peaks: print peak #plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0):", "= chrom self.start = int(start) self.end = int(end) self.name =", "JoinedPeak.header() for chrom,peaks in joined_peaks.items(): for peak in peaks: print", "no qualifying one npeaks = 0 joined_peaks = collections.defaultdict(list) for", "'-')) self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.plus_mapq = collections.deque([])", "[options] PEAKS CHIP INPUT [ (PEAKS CHIP INPUT) ... 
]", "= name self.center = int(summit) self.score = float(score) self.chip =", "libtype).get_tags(chrom, '-')) minus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '-')) self.plus_window =", "row in r: if float(row[5]) >= 100 and float(row[8]) >=", "find closest peak to each peak in the new set", ")/self.computed_control class SlavePeak(Peak): def __init__(self,set_name,center): self.name = 'Slave' self.set_name =", "collections.defaultdict(list) filesets[set_name] = FileSet(peakfile,chipfile,controlfile) r = csv.reader(open(peakfile),delimiter='\\t') r.next() # header", "ChIP experiments, Spread is the difference between the biggest and", "in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name = os.path.basename(peakfile).split('.')[0] peaksets[set_name] = collections.defaultdict(list) filesets[set_name] =", "in joined_peaks.items(): for jp in peaks: for j,set_name in enumerate(filesets.keys()):", "self.chip_file = chipfile self.chip_tags = pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file = controlfile", "#else: #score += plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count( #reference =", "joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak) plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset in filesets.items():", "values for the peaks. 
For each experinent \"X\", information about", "the window any more while self.plus_window and self.plus_window[0] < (peak.position", "(peak.position - self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft() # calculate normalized background level", "collections.defaultdict(list) self.peak_count = 0 self.plus_window = collections.deque([]) self.minus_window = collections.deque([])", "if float(row[5]) >= 100 and float(row[8]) >= 10: peakcounter +=", "fitting in the window any more while self.plus_window and self.plus_window[0]", "jp.pzpeak.position = jp.center jp.pzpeak.name = jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count += 1", "print max(peak_signals),min(peak_signals) raise return s class PeakScorer(pz.PeakContainer): def __init__(self, ip_tags,", "script finds peaks in common between multiple ChIP experiments determined", "For each experinent \"X\", information about the peaks are output:", "output from peakzilla and the remaining columns are re-calculated in", "self.control_file = controlfile self.control_tags = pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print self.chip_tags, self.control_tags", "JoinedPeak(Peak): WIDTH = 0 HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES = set()", "peaks that were called among all the ChIP experiments, Spread", "int(summit) self.score = float(score) self.chip = float(chip) self.control = float(control)", "x in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) + '\\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else:", "# calculate normalized background level # add position to region", "pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print self.chip_tags, self.control_tags def get_file(self,type): return getattr(self, '%s_file'", "#for aln in fileset.get_file(type).fetch( #reference = jp.chrom, 
start = start,", "<= maxdist: if closest is None or closest.dist(peak) > dist:", "identifying the peaks (Chromosome, Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). NPeaks signifies the", "self.minus_mapq.append(minus_mapq.popleft()) # get rid of old tags not fitting in", "INPUT) as input to peakzilla. This will output a table", "'+')) plus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '+')) minus_tags = collections.deque(getattr(self,'%s_tags'", "+= 1 #quantile_normalize.quantile_norm(matrix) #i = 0 #for chrom,peaks in joined_peaks.items():", "= float(fdr) def width(self): return self.end-self.start+1 class JoinedPeak(Peak): WIDTH =", "ip_tags self.control_tags = control_tags self.peak_size = peak_size self.peak_shift = (peak_size", "os.path.basename(peakfile).split('.')[0] peaksets[set_name] = collections.defaultdict(list) filesets[set_name] = FileSet(peakfile,chipfile,controlfile) r = csv.reader(open(peakfile),delimiter='\\t')", "in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /= len(peaksets) #", "not closest.can_add(peak): npeaks += 1 joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak) plus_model,minus_model =", "'\\t' #'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t' for htype in cls.HEADER_TYPES: s += '\\t'.join( htype", "+= 1 for chrom,peaks in self.peaks.items(): self.peaks[chrom] = sorted(self.peaks[chrom], lambda", "peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print JoinedPeak.header() for chrom,peaks in joined_peaks.items(): for peak", ">= 100 and float(row[8]) >= 10: peakcounter += 1 if", "peakcounter = 0 for row in r: if float(row[5]) >=", "#peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s += '\\t'.join([str(x) for x in #[peak.score,peak.chip,peak.fold_enrichment]]) try: if", "self.control_tags = control_tags self.peak_size = peak_size 
self.peak_shift = (peak_size -", "collections.deque([]) self.position = 0 def fill_scores(self,chrom,libtype,scoretype): plus_tags = collections.deque(getattr(self,'%s_tags' %", "[ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) + '\\t' return s def __str__(self): s", "/usr/bin/env python import os import sys import math import csv", "jp.pzpeak = pz.Peak() jp.pzpeak.size = self.peak_size jp.pzpeak.shift = self.peak_shift jp.pzpeak.position", "jp.pzpeak.name = jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count += 1 for chrom,peaks in", "join peaks [default: 10] ''' args = docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True) def", "as pz __doc__ = ''' Usage: join_peaks.py [options] PEAKS CHIP", "'\\t' return s def __str__(self): s = '' called_peaks =", "= matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print jp #i += 1 ''' i", "#jp.peaks[set_name] = SlavePeak( #set_name,matrix[i][j],matrix[i][j + len(peaksets)]) #else: #jp.peaks[set_name].computed_chip = matrix[i][j]", "set_name not in jp.peaks: jp.peaks[set_name] = SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print", "a PEAKS file as otuput by peakzilla, and 2 BED", "= controlfile self.control_tags = pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print self.chip_tags, self.control_tags def", "'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t' if hasattr(peak,'pzpeak'): s += '\\t'.join('%.2f' % x for x", "- 1) / 2 self.score_threshold = 10 self.plus_model = plus_model", "jp.chrom, start = start, #end = jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse: #score", "'\\t'.join('%.2f' % x for x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\\t' called_peaks", "PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name = set_name self.chrom = 
chrom", "of a peak). Options: --max-distance=DIST maximum summit distance to join", "'PZ' columns are the original output from peakzilla and the", ">= 0 and dist <= maxdist: if closest is None", "= set_name self.chrom = chrom self.start = int(start) self.end =", "for row in r: if float(row[5]) >= 100 and float(row[8])", "#end = jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse: #score += minus_model[aln.pos-start] #else: #score", "peak.mapq_score = float(sum(self.plus_mapq) + sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) + len(self.minus_mapq))) #if peak.name", "or not closest.can_add(peak): npeaks += 1 joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak) plus_model,minus_model", "self.end = int(end) self.name = name self.center = int(summit) self.score", "= 0 HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES = set() def __init__(self,pzpeak):", "'%s_%d' % (self.chrom,self.center) @classmethod def header(cls): s = '\\t'.join(cls.HEADER) +", "for peak in peaks: print peak #plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) #def", "peaks in common between multiple ChIP experiments determined by peakzilla.", "= sorted(self.peaks[chrom], lambda a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class", "get_tagcount(self,type): return getattr(self, '%s_tags' % type) maxdist = int(args['--max-distance']) peaksets", "self.chrom == other.chrom: return abs(self.center-other.center) else: return -1 def compute_fold_enrichment(self):", "re-calculated in this script (also output regardless of the presence", "= float(sum(self.plus_mapq) + sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) + len(self.minus_mapq))) #if peak.name ==", "in self.peaks.items(): if hasattr(peak,'score'): s += peak.name + '\\t' +", "self.peak_shift): 
self.plus_window.popleft() self.plus_mapq.popleft() while self.minus_window and self.minus_window[0] < (peak.position -", "+= '\\t'.join([str(x) for x in #[peak.score,peak.chip,peak.fold_enrichment]]) try: if len(peak_signals): s", "self.center = int(summit) self.score = float(score) self.chip = float(chip) self.control", "+ sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) + len(self.minus_mapq))) #if peak.name == 'Peak_12869': #print", "3 columns identifying the peaks (Chromosome, Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). NPeaks", "= collections.defaultdict(list) self.peak_count = 0 self.plus_window = collections.deque([]) self.minus_window =", "csv.reader(open(peakfile),delimiter='\\t') r.next() # header ''' #XXX: limit peaks maxpeaks =", "self.minus_window = collections.deque([]) self.position = 0 def fill_scores(self,chrom,libtype,scoretype): plus_tags =", "self.chip = float(chip) self.control = float(control) self.fold_enrichment = float(fold_enrichment) self.distribution_score", "determined by peakzilla. 
For each ChIP experiment, input a PEAKS", "self.minus_mapq = collections.deque([]) for peak in self.peaks[chrom]: # fill windows", "peak in the new set # make new peaks when", "s += 'NA\\tNA\\tNA\\tNA\\tNA\\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s += '%.1f\\t%.1f\\t%.1f\\t' % ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment)", "collections.deque([]) for peak in self.peaks[chrom]: # fill windows while plus_tags", "s += 'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t' if hasattr(peak,'pzpeak'): s += '\\t'.join('%.2f' % x", "> dist: closest = jp if closest is None or", "sum((x-mean)**2 for x in l)/(len(l)-1) return math.sqrt(variance) def std_err(l): return", "= ''' Usage: join_peaks.py [options] PEAKS CHIP INPUT [ (PEAKS", "csv import collections import docopt import peakzilla_qnorm_mapq_patched as pz __doc__", "pz.Peak() jp.pzpeak.size = self.peak_size jp.pzpeak.shift = self.peak_shift jp.pzpeak.position = jp.center", "set_name not in jp.peaks: #jp.peaks[set_name] = SlavePeak( #set_name,matrix[i][j],matrix[i][j + len(peaksets)])", "set_name self.chrom = chrom self.start = int(start) self.end = int(end)", "experiments determined by peakzilla. For each ChIP experiment, input a", "= collections.deque([]) self.plus_mapq = collections.deque([]) self.minus_mapq = collections.deque([]) for peak", "enumerate(peaksets.keys()): #if set_name not in jp.peaks: #jp.peaks[set_name] = SlavePeak( #set_name,matrix[i][j],matrix[i][j", "float(sum(self.plus_mapq) + sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) + len(self.minus_mapq))) #if peak.name == 'Peak_12869':", "joined_peaks.items(): for jp in peaks: if set_name not in jp.peaks:", "[ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) + '\\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s +=", "peakzilla. 
This will output a table with 3 columns identifying", "get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage = get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] = float(chip_coverage) #matrix[i][j+len(peaksets)] = float(control_coverage)", "self.peakfile = peakfile self.chip_file = chipfile self.chip_tags = pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True)", "maxdist: if closest is None or closest.dist(peak) > dist: closest", "#i += 1 ''' i = 0 for chrom,peaks in", "peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) + '\\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\t'", "End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). NPeaks signifies the number of peaks that were", "= 'Slave' self.set_name = set_name self.center = center class PZPeak(Peak):", "for chrom,peaks in self.peaks.items(): self.peaks[chrom] = sorted(self.peaks[chrom], lambda a,b: cmp(a.position,b.position))", "there's no qualifying one npeaks = 0 joined_peaks = collections.defaultdict(list)", "s += peak.name + '\\t' + '\\t'.join('%.2f' % x for", "return getattr(self, '%s_tags' % type) maxdist = int(args['--max-distance']) peaksets =", "from peakzilla and the remaining columns are re-calculated in this", "in common between multiple ChIP experiments determined by peakzilla. 
For", "= int(summit) self.score = float(score) self.chip = float(chip) self.control =", "a table with 3 columns identifying the peaks (Chromosome, Start,", "hasattr(peak,'pzpeak'): s += '\\t'.join('%.2f' % x for x in [", "1 for chrom,peaks in self.peaks.items(): self.peaks[chrom] = sorted(self.peaks[chrom], lambda a,b:", "s def __str__(self): s = '' called_peaks = 0 peak_signals", "for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name = os.path.basename(peakfile).split('.')[0] peaksets[set_name] = collections.defaultdict(list)", "= jp if closest is None or not closest.can_add(peak): npeaks", "in peaks: if set_name not in jp.peaks: jp.peaks[set_name] = SlavePeak(set_name,jp.center)", "start = start, #end = jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse: #score +=", "peaks: #for j,set_name in enumerate(peaksets.keys()): #if set_name not in jp.peaks:", "error on the mean among the ChIP and Enrich values", "= collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '-')) self.plus_window = collections.deque([]) self.minus_window =", "not fitting in the window any more while self.plus_window and", "2 BED files (CHIP and INPUT) as input to peakzilla.", "position to region if over threshold self.position = peak.position if", "= jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start = jp.center, #end = jp.center+1) #matrix =", "= jp.center, #end = jp.center+1) #matrix = np.zeros((npeaks,len(peaksets)*2)) #i =", "#print jp #i += 1 ''' i = 0 for", "raise return s class PeakScorer(pz.PeakContainer): def __init__(self, ip_tags, control_tags, peak_size,", "s class PeakScorer(pz.PeakContainer): def __init__(self, ip_tags, control_tags, peak_size, plus_model, minus_model):", "% x for x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\\t' called_peaks +=", "JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score = collections.defaultdict(list) for chrom,peaks in joined_peaks.items(): for jp", 
"INPUT [ (PEAKS CHIP INPUT) ... ] This script finds", "maximum summit distance to join peaks [default: 10] ''' args", "pzpeak return sum(p.center for p in self.peaks.values())/len(self.peaks) def name(self): return", "+= '%.1f\\t%.1f\\t%.1f\\t' % ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s += '\\t'.join([str(x) for x", "self.score_threshold = 10 self.plus_model = plus_model self.minus_model = minus_model self.peaks", "peaks. For each experinent \"X\", information about the peaks are", "( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s += '\\t'.join([str(x) for x in #[peak.score,peak.chip,peak.fold_enrichment]]) try:", "self.determine_signal_over_background(chrom) class FileSet(object): def __init__(self,peakfile,chipfile,controlfile): self.peakfile = peakfile self.chip_file =", "max(peak_signals),min(peak_signals) raise return s class PeakScorer(pz.PeakContainer): def __init__(self, ip_tags, control_tags,", "= os.path.basename(peakfile).split('.')[0] peaksets[set_name] = collections.defaultdict(list) filesets[set_name] = FileSet(peakfile,chipfile,controlfile) r =", "peakcounter > maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for row in r:", "len(peaksets)]) #else: #jp.peaks[set_name].computed_chip = matrix[i][j] #jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print", "in enumerate(filesets.keys()): matrix[i][j] = float(jp.peaks[set_name].computed_chip) matrix[i][j+len(peaksets)] = float(jp.peaks[set_name].computed_control) i +=", "are re-calculated in this script (also output regardless of the", "in self.peaks[chrom]: # fill windows while plus_tags and plus_tags[0] <=", "each peak in the new set # make new peaks", "columns identifying the peaks (Chromosome, Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). 
NPeaks signifies", "#[peak.score,peak.chip,peak.fold_enrichment]]) try: if len(peak_signals): s = '\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\t%.2f\\t%.2f\\t%.2f\\t'", "control_tags self.peak_size = peak_size self.peak_shift = (peak_size - 1) /", "= self.add(pzpeak) #pzpeak.center def can_add(self,pzpeak): return not pzpeak.set_name in self.peaks", "pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file = controlfile self.control_tags = pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print", "in peaks: closest = None for jp in joined_peaks[chrom]: dist", "INPUT) ... ] This script finds peaks in common between", "import peakzilla_qnorm_mapq_patched as pz __doc__ = ''' Usage: join_peaks.py [options]", "is None or closest.dist(peak) > dist: closest = jp if", "peak, ChipSE and EnrichSE are the standard error on the", ", sum(self.minus_mapq), len(self.plus_mapq) , len(self.minus_mapq) #print peak.mapq_score setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict):", "Name,'NPeaks','Spread','ChipSE','EnrichSE'). NPeaks signifies the number of peaks that were called", "will output a table with 3 columns identifying the peaks", "and self.minus_window[0] < (peak.position - self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft() # calculate", "sum(self.minus_mapq), len(self.plus_mapq) , len(self.minus_mapq) #print peak.mapq_score setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict): for", "add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name] = pzpeak return sum(p.center for p in", "are output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. 
All 'PZ' columns are the original", "'' called_peaks = 0 peak_signals = [] peak_enrichs = []", "matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print jp #i += 1 ''' i =", "self.minus_window[0] < (peak.position - self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft() # calculate normalized", "= get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage = get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] = float(chip_coverage) #matrix[i][j+len(peaksets)] =", "+ len(peaksets)]) #else: #jp.peaks[set_name].computed_chip = matrix[i][j] #jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment()", "make new peaks when there's no qualifying one npeaks =", "float(row[5]) >= 100 and float(row[8]) >= 10: peakcounter += 1", "self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) # get rid of old tags not", "closest = None for jp in joined_peaks[chrom]: dist = jp.dist(peak)", "biggest and smallest ChIP peak, ChipSE and EnrichSE are the", "calculate normalized background level # add position to region if", "mean among the ChIP and Enrich values for the peaks.", "chrom,peaks in joined_peaks.items(): for jp in peaks: if set_name not", "'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) + '\\t' return s def __str__(self): s =", "peakzilla_qnorm_mapq_patched as pz __doc__ = ''' Usage: join_peaks.py [options] PEAKS", "compute_fold_enrichment(self): self.computed_fold_enrichment = float(self.computed_chip )/self.computed_control class SlavePeak(Peak): def __init__(self,set_name,center): self.name", "return stddev(l)/math.sqrt(len(l)) class Peak(object): def dist(self,other): if self.chrom == other.chrom:", "peak_signals = [] peak_enrichs = [] for set_name,peak in self.peaks.items():", "= set() def __init__(self,pzpeak): self.chrom = pzpeak.chrom self.peaks = {}", 
"self.plus_model = plus_model self.minus_model = minus_model self.peaks = collections.defaultdict(list) self.peak_count", "self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.plus_mapq = collections.deque([]) self.minus_mapq", "in r: if float(row[5]) >= 100 and float(row[8]) >= 10:", "for the peaks. For each experinent \"X\", information about the", "== 'Peak_12869': #print zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq) , sum(self.minus_mapq),", "set # make new peaks when there's no qualifying one", "matrix[i][j] #jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print jp #i += 1", "float(chip) self.control = float(control) self.fold_enrichment = float(fold_enrichment) self.distribution_score = float(distribution_score)", "= (peak_size - 1) / 2 self.score_threshold = 10 self.plus_model", "peak.name + '\\t' + '\\t'.join('%.2f' % x for x in", "chipfile self.chip_tags = pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file = controlfile self.control_tags =", "Enrich values for the peaks. 
For each experinent \"X\", information", "self.minus_window and self.minus_window[0] < (peak.position - self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft() #", "( #peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t' if hasattr(peak,'pzpeak'): s +=", "self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class FileSet(object): def __init__(self,peakfile,chipfile,controlfile): self.peakfile = peakfile", "This script finds peaks in common between multiple ChIP experiments", "filesets[set_name] = FileSet(peakfile,chipfile,controlfile) r = csv.reader(open(peakfile),delimiter='\\t') r.next() # header '''", "= pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0): #score = 0 #start = max(0,jp.center-JoinedPeak.WIDTH/2)", "= np.zeros((npeaks,len(peaksets)*2)) #i = 0 #for chrom,peaks in joined_peaks.items(): #for", "2 self.score_threshold = 10 self.plus_model = plus_model self.minus_model = minus_model", "len(peaksets) # find closest peak to each peak in the", "new set # make new peaks when there's no qualifying", "#i += 1 #quantile_normalize.quantile_norm(matrix) #i = 0 #for chrom,peaks in", "fill_scores(self,chrom,libtype,scoretype): plus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '+')) plus_mapq = collections.deque(getattr(self,'%s_tags'", "windows while plus_tags and plus_tags[0] <= (peak.position + self.peak_shift): self.plus_window.append(plus_tags.popleft())", "if self.chrom == other.chrom: return abs(self.center-other.center) else: return -1 def", "for x in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) + '\\t' return s", "WIDTH = 0 HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES = set() def", "the number of peaks that were called among all the", "peak.name == 
'Peak_12869': #print zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq) ,", "'%.1f\\t%.1f\\t%.1f\\t' % ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s += '\\t'.join([str(x) for x in", "r = csv.reader(open(peakfile),delimiter='\\t') r.next() # header ''' #XXX: limit peaks", "= set_name self.center = center class PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr):", "def __init__(self,set_name,center): self.name = 'Slave' self.set_name = set_name self.center =", "name(self): return '%s_%d' % (self.chrom,self.center) @classmethod def header(cls): s =", "header ''' #XXX: limit peaks maxpeaks = 20 peakcounter =", "float(chip_coverage) #matrix[i][j+len(peaksets)] = float(control_coverage) #i += 1 #quantile_normalize.quantile_norm(matrix) #i =", "in this script (also output regardless of the presence of", "= float(chip_coverage) #matrix[i][j+len(peaksets)] = float(control_coverage) #i += 1 #quantile_normalize.quantile_norm(matrix) #i", "to peakzilla. 
This will output a table with 3 columns", "jp if closest is None or not closest.can_add(peak): npeaks +=", "joined_peaks.items(): for peak in peaks: print peak #plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH)", "% type) maxdist = int(args['--max-distance']) peaksets = {} filesets =", "pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset in filesets.items(): scorer = PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score", "(self.chrom,self.center) @classmethod def header(cls): s = '\\t'.join(cls.HEADER) + '\\t' #'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t'", "collections.defaultdict(list) for chrom,peaks in joined_peaks.items(): for jp in peaks: if", "+ s except: print max(peak_signals),min(peak_signals) raise return s class PeakScorer(pz.PeakContainer):", "= get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] = float(chip_coverage) #matrix[i][j+len(peaksets)] = float(control_coverage) #i +=", "PEAKS CHIP INPUT [ (PEAKS CHIP INPUT) ... 
] This", "self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.position = 0 def", "and smallest ChIP peak, ChipSE and EnrichSE are the standard", "1 if peakcounter > maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for row", "class Peak(object): def dist(self,other): if self.chrom == other.chrom: return abs(self.center-other.center)", "for p in self.peaks.values())/len(self.peaks) def name(self): return '%s_%d' % (self.chrom,self.center)", "#else: #jp.peaks[set_name].computed_chip = matrix[i][j] #jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print jp", "Spread is the difference between the biggest and smallest ChIP", "10: peakcounter += 1 if peakcounter > maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row))", "libtype).get_mapq(chrom, '-')) self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.plus_mapq =", "peak). Options: --max-distance=DIST maximum summit distance to join peaks [default:", "self.plus_window[0] < (peak.position - self.peak_shift): self.plus_window.popleft() self.plus_mapq.popleft() while self.minus_window and", "(peak.position - self.peak_shift): self.plus_window.popleft() self.plus_mapq.popleft() while self.minus_window and self.minus_window[0] <", "self.minus_window.popleft() self.minus_mapq.popleft() # calculate normalized background level # add position", ">= 10: peakcounter += 1 if peakcounter > maxpeaks: break", "for jp in peaks: for j,set_name in enumerate(filesets.keys()): matrix[i][j] =", "htype + '_' + x for x in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq']", "r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /= len(peaksets) # find", "between multiple ChIP experiments determined by peakzilla. 
For each ChIP", "import collections import docopt import peakzilla_qnorm_mapq_patched as pz __doc__ =", "pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0): #score = 0 #start = max(0,jp.center-JoinedPeak.WIDTH/2) #for", "self.peak_shift jp.pzpeak.position = jp.center jp.pzpeak.name = jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count +=", "controlfile self.control_tags = pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print self.chip_tags, self.control_tags def get_file(self,type):", "self.plus_mapq.append(plus_mapq.popleft()) while minus_tags and minus_tags[0] <= (peak.position + self.peak_shift): self.minus_window.append(minus_tags.popleft())", "plus_model self.minus_model = minus_model self.peaks = collections.defaultdict(list) self.peak_count = 0", "'%.1f\\t%.1f\\t%.1f\\t%.1f\\t' % ( #peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t' if hasattr(peak,'pzpeak'):", "#reference = jp.chrom, start = start, #end = jp.center+JoinedPeak.WIDTH/2): #if", "collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '+')) plus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '+'))", "python import os import sys import math import csv import", "sum(p.center for p in self.peaks.values())/len(self.peaks) def name(self): return '%s_%d' %", "if hasattr(peak,'pzpeak'): s += '\\t'.join('%.2f' % x for x in", "when there's no qualifying one npeaks = 0 joined_peaks =", "ChIP experiment, input a PEAKS file as otuput by peakzilla,", "self.control_tags = pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print self.chip_tags, self.control_tags def get_file(self,type): return", "float(row[8]) >= 10: peakcounter += 1 if peakcounter > maxpeaks:", "0 #for chrom,peaks in joined_peaks.items(): #for jp in peaks: #for", "in jp.peaks: #jp.peaks[set_name] = SlavePeak( #set_name,matrix[i][j],matrix[i][j + len(peaksets)]) 
#else: #jp.peaks[set_name].computed_chip", "with 3 columns identifying the peaks (Chromosome, Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE').", "and self.plus_window[0] < (peak.position - self.peak_shift): self.plus_window.popleft() self.plus_mapq.popleft() while self.minus_window", "self.peaks = collections.defaultdict(list) self.peak_count = 0 self.plus_window = collections.deque([]) self.minus_window", "docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True) def stddev(l): mean = sum(l)/float(len(l)) variance = sum((x-mean)**2", "self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name] = pzpeak return sum(p.center for p in self.peaks.values())/len(self.peaks)", "setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict): for chrom,peaks in peak_dict.items(): for jp in", "dist <= maxdist: if closest is None or closest.dist(peak) >", "# fill windows while plus_tags and plus_tags[0] <= (peak.position +", "j,set_name in enumerate(filesets.keys()): matrix[i][j] = float(jp.peaks[set_name].computed_chip) matrix[i][j+len(peaksets)] = float(jp.peaks[set_name].computed_control) i", "] This script finds peaks in common between multiple ChIP", "normalized background level # add position to region if over", "[default: 10] ''' args = docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True) def stddev(l): mean", "signifies the number of peaks that were called among all", "math.sqrt(variance) def std_err(l): return stddev(l)/math.sqrt(len(l)) class Peak(object): def dist(self,other): if", "chrom,peaks in peakset.items(): for peak in peaks: closest = None", "#score += minus_model[aln.pos-start] #else: #score += plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return", "for set_name,peakset in peaksets.items(): for chrom,peaks in peakset.items(): for peak", "pzpeak.set_name in self.peaks def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) 
self.peaks[pzpeak.set_name] = pzpeak return", "FileSet(peakfile,chipfile,controlfile) r = csv.reader(open(peakfile),delimiter='\\t') r.next() # header ''' #XXX: limit", "'\\t'.join(cls.HEADER) + '\\t' #'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t' for htype in cls.HEADER_TYPES: s +=", "--max-distance=DIST maximum summit distance to join peaks [default: 10] '''", "% libtype).get_mapq(chrom, '+')) minus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '-')) minus_mapq", "#if set_name not in jp.peaks: #jp.peaks[set_name] = SlavePeak( #set_name,matrix[i][j],matrix[i][j +", "CHIP INPUT [ (PEAKS CHIP INPUT) ... ] This script", "= {} filesets = {} for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name", "to each peak in the new set # make new", "sum(l)/float(len(l)) variance = sum((x-mean)**2 for x in l)/(len(l)-1) return math.sqrt(variance)", "a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class FileSet(object): def __init__(self,peakfile,chipfile,controlfile):", "l)/(len(l)-1) return math.sqrt(variance) def std_err(l): return stddev(l)/math.sqrt(len(l)) class Peak(object): def", "this script (also output regardless of the presence of a", "s += '\\t'.join('%.2f' % x for x in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score", "original output from peakzilla and the remaining columns are re-calculated", "== other.chrom: return abs(self.center-other.center) else: return -1 def compute_fold_enrichment(self): self.computed_fold_enrichment", ")/max(1,(len(self.plus_mapq) + len(self.minus_mapq))) #if peak.name == 'Peak_12869': #print zip(self.plus_window,self.plus_mapq) #print", "peaks: closest = None for jp in joined_peaks[chrom]: dist =", "<= (peak.position + self.peak_shift): 
self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while minus_tags and minus_tags[0]", "background level # add position to region if over threshold", ") + s else: s = '\\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\tNA\\tNA\\tNA\\t'", "#print peak.mapq_score setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict): for chrom,peaks in peak_dict.items(): for", "'\\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s +=", "#matrix[i][j] = float(chip_coverage) #matrix[i][j+len(peaksets)] = float(control_coverage) #i += 1 #quantile_normalize.quantile_norm(matrix)", "called_peaks += 1 #s += '%.1f\\t%.1f\\t%.1f\\t%.1f\\t' % ( #peak.score,peak.chip,peak.control,peak.fold_enrichment) else:", "s = '' called_peaks = 0 peak_signals = [] peak_enrichs", "minus_model): self.ip_tags = ip_tags self.control_tags = control_tags self.peak_size = peak_size", "= SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print JoinedPeak.header() for chrom,peaks in joined_peaks.items():", "experinent \"X\", information about the peaks are output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'.", "maxpeaks = 20 peakcounter = 0 for row in r:", "'Peak_12869': #print zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq) , sum(self.minus_mapq), len(self.plus_mapq)", "for chrom,peaks in joined_peaks.items(): for jp in peaks: for j,set_name", "len(self.plus_mapq) , len(self.minus_mapq) #print peak.mapq_score setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict): for chrom,peaks", "ip_tags, control_tags, peak_size, plus_model, minus_model): 
self.ip_tags = ip_tags self.control_tags =", "peaks: print peak #plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0): #score =", "'\\t' called_peaks += 1 #s += '%.1f\\t%.1f\\t%.1f\\t%.1f\\t' % ( #peak.score,peak.chip,peak.control,peak.fold_enrichment)", "chrom,peaks in joined_peaks.items(): for jp in peaks: for j,set_name in", "self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while minus_tags and minus_tags[0] <= (peak.position + self.peak_shift):", "Peak(object): def dist(self,other): if self.chrom == other.chrom: return abs(self.center-other.center) else:", "#quantile_normalize.quantile_norm(matrix) #i = 0 #for chrom,peaks in joined_peaks.items(): #for jp", "'%s_file' % type) def get_tagcount(self,type): return getattr(self, '%s_tags' % type)", "def header(cls): s = '\\t'.join(cls.HEADER) + '\\t' #'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t' for htype", "difference between the biggest and smallest ChIP peak, ChipSE and", "as input to peakzilla. 
This will output a table with", "= ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES = set() def __init__(self,pzpeak): self.chrom = pzpeak.chrom", "1 #s += '%.1f\\t%.1f\\t%.1f\\t%.1f\\t' % ( #peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s +=", "the difference between the biggest and smallest ChIP peak, ChipSE", "= 0 #start = max(0,jp.center-JoinedPeak.WIDTH/2) #for aln in fileset.get_file(type).fetch( #reference", "in fileset.get_file(type).fetch( #reference = jp.chrom, start = start, #end =", "name self.center = int(summit) self.score = float(score) self.chip = float(chip)", "while self.plus_window and self.plus_window[0] < (peak.position - self.peak_shift): self.plus_window.popleft() self.plus_mapq.popleft()", "HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES = set() def __init__(self,pzpeak): self.chrom =", "s = '\\t'.join(cls.HEADER) + '\\t' #'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t' for htype in cls.HEADER_TYPES:", "by peakzilla. 
For each ChIP experiment, input a PEAKS file", "if len(peak_signals): s = '\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\t%.2f\\t%.2f\\t%.2f\\t' % (", "self.chrom = chrom self.start = int(start) self.end = int(end) self.name", "control_tags, peak_size, plus_model, minus_model): self.ip_tags = ip_tags self.control_tags = control_tags", "self.chip_tags(chipfile,True) self.control_file = controlfile self.control_tags = pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print self.chip_tags,", "and float(row[8]) >= 10: peakcounter += 1 if peakcounter >", "plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset in filesets.items(): scorer = PeakScorer(fileset.chip_tags,fileset.control_tags,", "peaks_to_score = collections.defaultdict(list) for chrom,peaks in joined_peaks.items(): for jp in", "and Enrich values for the peaks. For each experinent \"X\",", "in l)/(len(l)-1) return math.sqrt(variance) def std_err(l): return stddev(l)/math.sqrt(len(l)) class Peak(object):", "__init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name = set_name self.chrom = chrom self.start =", "% ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s += '\\t'.join([str(x) for x in #[peak.score,peak.chip,peak.fold_enrichment]])", "= collections.deque([]) self.position = 0 def fill_scores(self,chrom,libtype,scoretype): plus_tags = collections.deque(getattr(self,'%s_tags'", "peak_size, plus_model, minus_model): self.ip_tags = ip_tags self.control_tags = control_tags self.peak_size", "def __init__(self,pzpeak): self.chrom = pzpeak.chrom self.peaks = {} self.center =", "is the difference between the biggest and smallest ChIP peak,", "npeaks += 1 joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak) plus_model,minus_model = 
pz.generate_ideal_model(JoinedPeak.WIDTH) for", "join_peaks.py [options] PEAKS CHIP INPUT [ (PEAKS CHIP INPUT) ...", "def std_err(l): return stddev(l)/math.sqrt(len(l)) class Peak(object): def dist(self,other): if self.chrom", "#start = jp.center, #end = jp.center+1) #matrix = np.zeros((npeaks,len(peaksets)*2)) #i", "score_peaks(self,peak_dict): for chrom,peaks in peak_dict.items(): for jp in peaks: jp.pzpeak", "return not pzpeak.set_name in self.peaks def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name] =", "in peaksets.items(): for chrom,peaks in peakset.items(): for peak in peaks:", "= collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '+')) minus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom,", "self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\t%.2f\\t%.2f\\t%.2f\\t' % ( max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs),", "self.position = peak.position if libtype == 'ip': peak.mapq_score = float(sum(self.plus_mapq)", "float(control_coverage) #i += 1 #quantile_normalize.quantile_norm(matrix) #i = 0 #for chrom,peaks", "% x for x in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) + '\\t'", "self.center = center class PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name =", "aln.is_reverse: #score += minus_model[aln.pos-start] #else: #score += plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type)", "10.**6*fileset.get_file(type).count( #reference = jp.chrom, #start = max(0,jp.center-JoinedPeak.WIDTH/2), #end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type)", "if libtype == 'ip': peak.mapq_score = float(sum(self.plus_mapq) + sum(self.minus_mapq) )/max(1,(len(self.plus_mapq)", "x in l)/(len(l)-1) return math.sqrt(variance) def 
std_err(l): return stddev(l)/math.sqrt(len(l)) class", "'\\t%.2f\\t%.2f\\t%.2f\\t' % ( max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs), ) +", "#s += '%.1f\\t%.1f\\t%.1f\\t' % ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s += '\\t'.join([str(x) for", "print JoinedPeak.header() for chrom,peaks in joined_peaks.items(): for peak in peaks:", "= float(distribution_score) self.fdr = float(fdr) def width(self): return self.end-self.start+1 class", "def score_peaks(self,peak_dict): for chrom,peaks in peak_dict.items(): for jp in peaks:", "jp.pzpeak.shift = self.peak_shift jp.pzpeak.position = jp.center jp.pzpeak.name = jp.name self.peaks[chrom].append(jp.pzpeak)", "in jp.peaks: jp.peaks[set_name] = SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print JoinedPeak.header() for", "#for jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #control_coverage =", "- self.peak_shift): self.plus_window.popleft() self.plus_mapq.popleft() while self.minus_window and self.minus_window[0] < (peak.position", "zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name = os.path.basename(peakfile).split('.')[0] peaksets[set_name] = collections.defaultdict(list) filesets[set_name] = FileSet(peakfile,chipfile,controlfile)", "r.next() # header ''' #XXX: limit peaks maxpeaks = 20", "collections.deque([]) self.minus_window = collections.deque([]) self.position = 0 def fill_scores(self,chrom,libtype,scoretype): plus_tags", "experiment, input a PEAKS file as otuput by peakzilla, and", "by peakzilla, and 2 BED files (CHIP and INPUT) as", "__init__(self,pzpeak): self.chrom = pzpeak.chrom self.peaks = {} self.center = self.add(pzpeak)", "'%s_tags' % type) maxdist = int(args['--max-distance']) peaksets = {} filesets", "def width(self): return self.end-self.start+1 class JoinedPeak(Peak): WIDTH = 0 HEADER", 
"self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\tNA\\tNA\\tNA\\t' + s except: print max(peak_signals),min(peak_signals) raise return", "class PeakScorer(pz.PeakContainer): def __init__(self, ip_tags, control_tags, peak_size, plus_model, minus_model): self.ip_tags", "zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq) , sum(self.minus_mapq), len(self.plus_mapq) , len(self.minus_mapq)", "print peak #plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0): #score = 0", "for chrom,peaks in peak_dict.items(): for jp in peaks: jp.pzpeak =", "peaksets[set_name] = collections.defaultdict(list) filesets[set_name] = FileSet(peakfile,chipfile,controlfile) r = csv.reader(open(peakfile),delimiter='\\t') r.next()", "= peak_size self.peak_shift = (peak_size - 1) / 2 self.score_threshold", "= collections.deque([]) self.minus_window = collections.deque([]) self.plus_mapq = collections.deque([]) self.minus_mapq =", "in joined_peaks[chrom]: dist = jp.dist(peak) if dist >= 0 and", "% libtype).get_tags(chrom, '+')) plus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '+')) minus_tags", "while minus_tags and minus_tags[0] <= (peak.position + self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft())", "0 for chrom,peaks in joined_peaks.items(): for jp in peaks: for", "float(control) self.fold_enrichment = float(fold_enrichment) self.distribution_score = float(distribution_score) self.fdr = float(fdr)", "dist >= 0 and dist <= maxdist: if closest is", "in joined_peaks.items(): #for jp in peaks: #for j,set_name in enumerate(peaksets.keys()):", "def __init__(self,peakfile,chipfile,controlfile): self.peakfile = peakfile self.chip_file = chipfile self.chip_tags =", "that were called among all the ChIP experiments, Spread is", "''' args = docopt.docopt(__doc__) 
#np.set_printoptions(precision=1,suppress=True) def stddev(l): mean = sum(l)/float(len(l))", "+= 'NA\\tNA\\tNA\\tNA\\tNA\\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s += '%.1f\\t%.1f\\t%.1f\\t' % ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s", "'\\t'.join([str(x) for x in #[peak.score,peak.chip,peak.fold_enrichment]]) try: if len(peak_signals): s =", "peakfile self.chip_file = chipfile self.chip_tags = pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file =", "add position to region if over threshold self.position = peak.position", "self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class FileSet(object): def __init__(self,peakfile,chipfile,controlfile): self.peakfile =", "#np.set_printoptions(precision=1,suppress=True) def stddev(l): mean = sum(l)/float(len(l)) variance = sum((x-mean)**2 for", "self.plus_mapq.popleft() while self.minus_window and self.minus_window[0] < (peak.position - self.peak_shift): self.minus_window.popleft()", "= collections.deque([]) self.minus_window = collections.deque([]) self.position = 0 def fill_scores(self,chrom,libtype,scoretype):", "(Chromosome, Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). NPeaks signifies the number of peaks", "= 0 self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.position =", "among the ChIP and Enrich values for the peaks. 
For", "set_name,peakset in peaksets.items(): for chrom,peaks in peakset.items(): for peak in", "the standard error on the mean among the ChIP and", "collections import docopt import peakzilla_qnorm_mapq_patched as pz __doc__ = '''", "= {} for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name = os.path.basename(peakfile).split('.')[0] peaksets[set_name]", "in peaks: print peak #plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0): #score", "self.peaks[chrom] = sorted(self.peaks[chrom], lambda a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom)", "+= peak.name + '\\t' + '\\t'.join('%.2f' % x for x", "minus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '-')) self.plus_window = collections.deque([]) self.minus_window", "the ChIP and Enrich values for the peaks. For each", "PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score = collections.defaultdict(list) for chrom,peaks in joined_peaks.items(): for", "if hasattr(peak,'score'): s += peak.name + '\\t' + '\\t'.join('%.2f' %", "in joined_peaks.items(): for peak in peaks: print peak #plus_model,minus_model =", "0 peak_signals = [] peak_enrichs = [] for set_name,peak in", "% libtype).get_mapq(chrom, '-')) self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.plus_mapq", "on the mean among the ChIP and Enrich values for", "'\\t'.join( htype + '_' + x for x in [", "return s class PeakScorer(pz.PeakContainer): def __init__(self, ip_tags, control_tags, peak_size, plus_model,", "... 
] This script finds peaks in common between multiple", "#if peak.name == 'Peak_12869': #print zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq)", "HEADER_TYPES = set() def __init__(self,pzpeak): self.chrom = pzpeak.chrom self.peaks =", "closest = jp if closest is None or not closest.can_add(peak):", "self.peak_shift = (peak_size - 1) / 2 self.score_threshold = 10", "#s += '%.1f\\t%.1f\\t%.1f\\t%.1f\\t' % ( #peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t'", "closest is None or closest.dist(peak) > dist: closest = jp", "= start, #end = jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse: #score += minus_model[aln.pos-start]", "= peakfile self.chip_file = chipfile self.chip_tags = pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file", "in #[peak.score,peak.chip,peak.fold_enrichment]]) try: if len(peak_signals): s = '\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\", "peakzilla and the remaining columns are re-calculated in this script", "NPeaks signifies the number of peaks that were called among", "= 10 self.plus_model = plus_model self.minus_model = minus_model self.peaks =", "in self.peaks.values())/len(self.peaks) def name(self): return '%s_%d' % (self.chrom,self.center) @classmethod def", "#return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count( #reference = jp.chrom, #start = max(0,jp.center-JoinedPeak.WIDTH/2),", "= max(0,jp.center-JoinedPeak.WIDTH/2), #end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start = jp.center, #end =", "threshold self.position = peak.position if libtype == 'ip': peak.mapq_score =", "peak_enrichs = [] for set_name,peak in self.peaks.items(): if hasattr(peak,'score'): s", "PEAKS file as otuput by peakzilla, and 2 BED files", "peak.mapq_score 
setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict): for chrom,peaks in peak_dict.items(): for jp", "getattr(self, '%s_file' % type) def get_tagcount(self,type): return getattr(self, '%s_tags' %", "chrom,peaks in joined_peaks.items(): for peak in peaks: print peak #plus_model,minus_model", "remaining columns are re-calculated in this script (also output regardless", "self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft() # calculate normalized background level # add", "filesets = {} for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name = os.path.basename(peakfile).split('.')[0]", "= collections.defaultdict(list) filesets[set_name] = FileSet(peakfile,chipfile,controlfile) r = csv.reader(open(peakfile),delimiter='\\t') r.next() #", ", len(self.minus_mapq) #print peak.mapq_score setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict): for chrom,peaks in", "CHIP INPUT) ... ] This script finds peaks in common", "For each ChIP experiment, input a PEAKS file as otuput", "'\\t'.join('%.2f' % x for x in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) +", "window any more while self.plus_window and self.plus_window[0] < (peak.position -", "for row in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /=", "% ( max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs), ) + s", "= sum((x-mean)**2 for x in l)/(len(l)-1) return math.sqrt(variance) def std_err(l):", "class JoinedPeak(Peak): WIDTH = 0 HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES =", "#print zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq) , sum(self.minus_mapq), len(self.plus_mapq) ,", 
"'\\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\tNA\\tNA\\tNA\\t' + s except: print max(peak_signals),min(peak_signals) raise", "jp in peaks: jp.pzpeak = pz.Peak() jp.pzpeak.size = self.peak_size jp.pzpeak.shift", "#end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start = jp.center, #end = jp.center+1) #matrix", "ChIP experiments determined by peakzilla. For each ChIP experiment, input", "PeakScorer(pz.PeakContainer): def __init__(self, ip_tags, control_tags, peak_size, plus_model, minus_model): self.ip_tags =", "return self.end-self.start+1 class JoinedPeak(Peak): WIDTH = 0 HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE']", "# header ''' #XXX: limit peaks maxpeaks = 20 peakcounter", "minus_model[aln.pos-start] #else: #score += plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count( #reference", "finds peaks in common between multiple ChIP experiments determined by", "x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\\t' called_peaks += 1 #s +=", "= '\\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\tNA\\tNA\\tNA\\t' + s except: print max(peak_signals),min(peak_signals)", "__doc__ = ''' Usage: join_peaks.py [options] PEAKS CHIP INPUT [", "were called among all the ChIP experiments, Spread is the", "to join peaks [default: 10] ''' args = docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True)", "def dist(self,other): if self.chrom == other.chrom: return abs(self.center-other.center) else: return", "and dist <= maxdist: if closest is None or closest.dist(peak)", "if closest is None or not closest.can_add(peak): npeaks += 1", "len(self.minus_mapq) #print peak.mapq_score setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict): for chrom,peaks in peak_dict.items():", "= SlavePeak( 
#set_name,matrix[i][j],matrix[i][j + len(peaksets)]) #else: #jp.peaks[set_name].computed_chip = matrix[i][j] #jp.peaks[set_name].computed_control", "EnrichSE are the standard error on the mean among the", "return -1 def compute_fold_enrichment(self): self.computed_fold_enrichment = float(self.computed_chip )/self.computed_control class SlavePeak(Peak):", "self.chip_tags = pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file = controlfile self.control_tags = pz.TagContainer(store_mapq=True)", "Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). NPeaks signifies the number of peaks that", "set_name = os.path.basename(peakfile).split('.')[0] peaksets[set_name] = collections.defaultdict(list) filesets[set_name] = FileSet(peakfile,chipfile,controlfile) r", "jp.center jp.pzpeak.name = jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count += 1 for chrom,peaks", "in self.peaks.items(): self.peaks[chrom] = sorted(self.peaks[chrom], lambda a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background')", "ChIP peak, ChipSE and EnrichSE are the standard error on", "+\\ '\\tNA\\tNA\\tNA\\t' + s except: print max(peak_signals),min(peak_signals) raise return s", "10 self.plus_model = plus_model self.minus_model = minus_model self.peaks = collections.defaultdict(list)", "or closest.dist(peak) > dist: closest = jp if closest is", "+= '%.1f\\t%.1f\\t%.1f\\t%.1f\\t' % ( #peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t' if", "#i = 0 #for chrom,peaks in joined_peaks.items(): #for jp in", "+= 1 ''' i = 0 for chrom,peaks in joined_peaks.items():", "sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) + len(self.minus_mapq))) #if peak.name == 'Peak_12869': #print zip(self.plus_window,self.plus_mapq)", "in filesets.items(): scorer = PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score = 
collections.defaultdict(list) for", "for chrom,peaks in joined_peaks.items(): for jp in peaks: if set_name", "output a table with 3 columns identifying the peaks (Chromosome,", "#print zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq) , sum(self.minus_mapq), len(self.plus_mapq) , len(self.minus_mapq) #print", "fold_enrichment,distribution_score,fdr): self.set_name = set_name self.chrom = chrom self.start = int(start)", "= jp.chrom, #start = max(0,jp.center-JoinedPeak.WIDTH/2), #end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start =", "enumerate(peaksets.keys()): #control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage = get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] = float(chip_coverage)", "#control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage = get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] = float(chip_coverage) #matrix[i][j+len(peaksets)]", "'Slave' self.set_name = set_name self.center = center class PZPeak(Peak): def", "fill windows while plus_tags and plus_tags[0] <= (peak.position + self.peak_shift):", "= jp.chrom, start = start, #end = jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse:", "peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s += '%.1f\\t%.1f\\t%.1f\\t'", "file as otuput by peakzilla, and 2 BED files (CHIP", "max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs), ) + s else: s", "and plus_tags[0] <= (peak.position + self.peak_shift): self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while minus_tags", "self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class FileSet(object): def __init__(self,peakfile,chipfile,controlfile): self.peakfile = 
peakfile self.chip_file", "# make new peaks when there's no qualifying one npeaks", "-1 def compute_fold_enrichment(self): self.computed_fold_enrichment = float(self.computed_chip )/self.computed_control class SlavePeak(Peak): def", "self.peaks.values())/len(self.peaks) def name(self): return '%s_%d' % (self.chrom,self.center) @classmethod def header(cls):", "#plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0): #score = 0 #start =", "% libtype).get_tags(chrom, '-')) minus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '-')) self.plus_window", "stddev(l)/math.sqrt(len(l)) class Peak(object): def dist(self,other): if self.chrom == other.chrom: return", "#for chrom,peaks in joined_peaks.items(): #for jp in peaks: #for j,set_name", "self.peak_size = peak_size self.peak_shift = (peak_size - 1) / 2", "peak to each peak in the new set # make", "''' for row in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH", "self.add(pzpeak) #pzpeak.center def can_add(self,pzpeak): return not pzpeak.set_name in self.peaks def", "chrom self.start = int(start) self.end = int(end) self.name = name", "one npeaks = 0 joined_peaks = collections.defaultdict(list) for set_name,peakset in", "+ '\\t' called_peaks += 1 #s += '%.1f\\t%.1f\\t%.1f\\t%.1f\\t' % (", "class SlavePeak(Peak): def __init__(self,set_name,center): self.name = 'Slave' self.set_name = set_name", "'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. 
All 'PZ' columns are the original output from peakzilla", "1) / 2 self.score_threshold = 10 self.plus_model = plus_model self.minus_model", "level # add position to region if over threshold self.position", "cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class FileSet(object): def __init__(self,peakfile,chipfile,controlfile): self.peakfile", "class PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name = set_name self.chrom =", "in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\\t' called_peaks += 1 #s += '%.1f\\t%.1f\\t%.1f\\t%.1f\\t'", "# add position to region if over threshold self.position =", "return sum(p.center for p in self.peaks.values())/len(self.peaks) def name(self): return '%s_%d'", ") + '\\t' return s def __str__(self): s = ''", "self.minus_model = minus_model self.peaks = collections.defaultdict(list) self.peak_count = 0 self.plus_window", "+ len(self.minus_mapq))) #if peak.name == 'Peak_12869': #print zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq)", "peaksets.items(): for chrom,peaks in peakset.items(): for peak in peaks: closest", "self.chrom = pzpeak.chrom self.peaks = {} self.center = self.add(pzpeak) #pzpeak.center", "in cls.HEADER_TYPES: s += '\\t'.join( htype + '_' + x", "x for x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\\t' called_peaks += 1", "= jp.center jp.pzpeak.name = jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count += 1 for", "collections.deque([]) self.minus_mapq = collections.deque([]) for peak in self.peaks[chrom]: # fill", "in peakset.items(): for peak in peaks: closest = None for", "int(start) self.end = int(end) self.name = name self.center = int(summit)", "jp #i += 1 ''' i = 0 for chrom,peaks", 
"+ '\\t' #'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t' for htype in cls.HEADER_TYPES: s += '\\t'.join(", "= minus_model self.peaks = collections.defaultdict(list) self.peak_count = 0 self.plus_window =", "self.peaks[chrom].append(jp.pzpeak) self.peak_count += 1 for chrom,peaks in self.peaks.items(): self.peaks[chrom] =", "lambda a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class FileSet(object): def", "1 ''' i = 0 for chrom,peaks in joined_peaks.items(): for", "can_add(self,pzpeak): return not pzpeak.set_name in self.peaks def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name]", "htype in cls.HEADER_TYPES: s += '\\t'.join( htype + '_' +", "jp.center+1) #matrix = np.zeros((npeaks,len(peaksets)*2)) #i = 0 #for chrom,peaks in", "in peaks: #for j,set_name in enumerate(peaksets.keys()): #control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage", "''' Usage: join_peaks.py [options] PEAKS CHIP INPUT [ (PEAKS CHIP", "self.end-self.start+1 class JoinedPeak(Peak): WIDTH = 0 HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES", "= '\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\t%.2f\\t%.2f\\t%.2f\\t' % ( max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon),", "= jp.center+1) #matrix = np.zeros((npeaks,len(peaksets)*2)) #i = 0 #for chrom,peaks", "self.peaks = {} self.center = self.add(pzpeak) #pzpeak.center def can_add(self,pzpeak): return", "in self.peaks def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name] = pzpeak return sum(p.center", "import docopt import peakzilla_qnorm_mapq_patched as pz __doc__ = ''' Usage:", "except: print max(peak_signals),min(peak_signals) 
raise return s class PeakScorer(pz.PeakContainer): def __init__(self,", "self.peaks[chrom]: # fill windows while plus_tags and plus_tags[0] <= (peak.position", "not in jp.peaks: #jp.peaks[set_name] = SlavePeak( #set_name,matrix[i][j],matrix[i][j + len(peaksets)]) #else:", "< (peak.position - self.peak_shift): self.plus_window.popleft() self.plus_mapq.popleft() while self.minus_window and self.minus_window[0]", "jp.peaks: #jp.peaks[set_name] = SlavePeak( #set_name,matrix[i][j],matrix[i][j + len(peaksets)]) #else: #jp.peaks[set_name].computed_chip =", "import csv import collections import docopt import peakzilla_qnorm_mapq_patched as pz", "'\\tNA\\tNA\\tNA\\t' + s except: print max(peak_signals),min(peak_signals) raise return s class", "collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '-')) self.plus_window = collections.deque([]) self.minus_window = collections.deque([])", "= jp.dist(peak) if dist >= 0 and dist <= maxdist:", "self.plus_mapq = collections.deque([]) self.minus_mapq = collections.deque([]) for peak in self.peaks[chrom]:", "peak_size self.peak_shift = (peak_size - 1) / 2 self.score_threshold =", "= collections.deque([]) self.minus_mapq = collections.deque([]) for peak in self.peaks[chrom]: #", "return '%s_%d' % (self.chrom,self.center) @classmethod def header(cls): s = '\\t'.join(cls.HEADER)", "dist = jp.dist(peak) if dist >= 0 and dist <=", "[] for set_name,peak in self.peaks.items(): if hasattr(peak,'score'): s += peak.name", "else: s = '\\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\tNA\\tNA\\tNA\\t' + s except:", "None for jp in joined_peaks[chrom]: dist = jp.dist(peak) if dist", "between the biggest and smallest ChIP peak, ChipSE and EnrichSE", "chrom,peaks in joined_peaks.items(): #for jp in peaks: #for j,set_name in", "import math import csv import collections import docopt import peakzilla_qnorm_mapq_patched", "BED files (CHIP and INPUT) as input to peakzilla. 
This", "+= '\\t'.join('%.2f' % x for x in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ])", "+\\ '\\t%.2f\\t%.2f\\t%.2f\\t' % ( max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs), )", "= PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score = collections.defaultdict(list) for chrom,peaks in joined_peaks.items():", "in peaks: #for j,set_name in enumerate(peaksets.keys()): #if set_name not in", "+ '\\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s", "jp.dist(peak) if dist >= 0 and dist <= maxdist: if", "start, #end = jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse: #score += minus_model[aln.pos-start] #else:", "collections.deque([]) self.plus_mapq = collections.deque([]) self.minus_mapq = collections.deque([]) for peak in", "= pzpeak return sum(p.center for p in self.peaks.values())/len(self.peaks) def name(self):", "s else: s = '\\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\tNA\\tNA\\tNA\\t' + s", "minus_tags[0] <= (peak.position + self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) # get rid", "#print sum(self.plus_mapq) , sum(self.minus_mapq), len(self.plus_mapq) , len(self.minus_mapq) #print peak.mapq_score setattr(peak,scoretype,self.calculate_score())", "100 and float(row[8]) >= 10: peakcounter += 1 if peakcounter", "closest is None or not closest.can_add(peak): npeaks += 1 joined_peaks[chrom].append(JoinedPeak(peak))", "= int(args['--max-distance']) peaksets = {} filesets = {} for peakfile,chipfile,controlfile", "= int(start) self.end = int(end) self.name = name self.center =", "while self.minus_window and 
self.minus_window[0] < (peak.position - self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft()", "max(0,jp.center-JoinedPeak.WIDTH/2) #for aln in fileset.get_file(type).fetch( #reference = jp.chrom, start =", "= float(self.computed_chip )/self.computed_control class SlavePeak(Peak): def __init__(self,set_name,center): self.name = 'Slave'", "for x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\\t' called_peaks += 1 #s", "minus_tags and minus_tags[0] <= (peak.position + self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) #", "peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /= len(peaksets) # find closest", "def __str__(self): s = '' called_peaks = 0 peak_signals =", "else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t' if hasattr(peak,'pzpeak'): s += '\\t'.join('%.2f' %", "libtype == 'ip': peak.mapq_score = float(sum(self.plus_mapq) + sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) +", "limit peaks maxpeaks = 20 peakcounter = 0 for row", "chrom,peaks in self.peaks.items(): self.peaks[chrom] = sorted(self.peaks[chrom], lambda a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score')", "self.peak_size jp.pzpeak.shift = self.peak_shift jp.pzpeak.position = jp.center jp.pzpeak.name = jp.name", "def get_file(self,type): return getattr(self, '%s_file' % type) def get_tagcount(self,type): return", "ChIP and Enrich values for the peaks. 
For each experinent", "= {} self.center = self.add(pzpeak) #pzpeak.center def can_add(self,pzpeak): return not", "+ self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) # get rid of old tags", "import os import sys import math import csv import collections", "jp.pzpeak.size = self.peak_size jp.pzpeak.shift = self.peak_shift jp.pzpeak.position = jp.center jp.pzpeak.name", "self.chip_tags, self.control_tags def get_file(self,type): return getattr(self, '%s_file' % type) def", "{} filesets = {} for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name =", "= FileSet(peakfile,chipfile,controlfile) r = csv.reader(open(peakfile),delimiter='\\t') r.next() # header ''' #XXX:", "in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) + '\\t' return s def __str__(self):", "new peaks when there's no qualifying one npeaks = 0", "peaks maxpeaks = 20 peakcounter = 0 for row in", "= matrix[i][j] #jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print jp #i +=", "distance to join peaks [default: 10] ''' args = docopt.docopt(__doc__)", "else: closest.add(peak) plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset in filesets.items(): scorer", "'_' + x for x in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) +", "= 0 peak_signals = [] peak_enrichs = [] for set_name,peak", "s = '\\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\tNA\\tNA\\tNA\\t' + s except: print", "mean = sum(l)/float(len(l)) variance = sum((x-mean)**2 for x in l)/(len(l)-1)", "= '' called_peaks = 0 peak_signals = [] peak_enrichs =", "self.control_tags(controlfile,True) #print self.chip_tags, self.control_tags def get_file(self,type): return getattr(self, '%s_file' %", "return s def __str__(self): s 
= '' called_peaks = 0", "(also output regardless of the presence of a peak). Options:", "20 peakcounter = 0 for row in r: if float(row[5])", "/= len(peaksets) # find closest peak to each peak in", "jp.chrom, #start = max(0,jp.center-JoinedPeak.WIDTH/2), #end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start = jp.center,", "float(fold_enrichment) self.distribution_score = float(distribution_score) self.fdr = float(fdr) def width(self): return", "jp.peaks[set_name] = SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print JoinedPeak.header() for chrom,peaks in", "#reference = jp.chrom, #start = max(0,jp.center-JoinedPeak.WIDTH/2), #end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start", "s except: print max(peak_signals),min(peak_signals) raise return s class PeakScorer(pz.PeakContainer): def", "# find closest peak to each peak in the new", "for set_name,peak in self.peaks.items(): if hasattr(peak,'score'): s += peak.name +", "aln in fileset.get_file(type).fetch( #reference = jp.chrom, start = start, #end", "closest.can_add(peak): npeaks += 1 joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak) plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH)", "each ChIP experiment, input a PEAKS file as otuput by", "#start = max(0,jp.center-JoinedPeak.WIDTH/2) #for aln in fileset.get_file(type).fetch( #reference = jp.chrom,", "the presence of a peak). 
Options: --max-distance=DIST maximum summit distance", "= chipfile self.chip_tags = pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file = controlfile self.control_tags", "self.distribution_score = float(distribution_score) self.fdr = float(fdr) def width(self): return self.end-self.start+1", "in the new set # make new peaks when there's", "in enumerate(peaksets.keys()): #if set_name not in jp.peaks: #jp.peaks[set_name] = SlavePeak(", "for jp in peaks: if set_name not in jp.peaks: jp.peaks[set_name]", "about the peaks are output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. All 'PZ' columns", "hasattr(peak,'score'): s += peak.name + '\\t' + '\\t'.join('%.2f' % x", "type) def get_tagcount(self,type): return getattr(self, '%s_tags' % type) maxdist =", "closest.dist(peak) > dist: closest = jp if closest is None", "table with 3 columns identifying the peaks (Chromosome, Start, End,", "self.start = int(start) self.end = int(end) self.name = name self.center", "in peak_dict.items(): for jp in peaks: jp.pzpeak = pz.Peak() jp.pzpeak.size", "[peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\\t' called_peaks += 1 #s += '%.1f\\t%.1f\\t%.1f\\t%.1f\\t' %", "plus_tags[0] <= (peak.position + self.peak_shift): self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while minus_tags and", "= pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset in filesets.items(): scorer = PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model)", "output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. 
All 'PZ' columns are the original output", "stddev(l): mean = sum(l)/float(len(l)) variance = sum((x-mean)**2 for x in", "self.set_name = set_name self.chrom = chrom self.start = int(start) self.end", "(peak.position + self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) # get rid of old", "# get rid of old tags not fitting in the", "qualifying one npeaks = 0 joined_peaks = collections.defaultdict(list) for set_name,peakset", "input a PEAKS file as otuput by peakzilla, and 2", "#jp.peaks[set_name].computed_chip = matrix[i][j] #jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print jp #i", "experiments, Spread is the difference between the biggest and smallest", "set_name self.center = center class PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name", "float(fdr) def width(self): return self.end-self.start+1 class JoinedPeak(Peak): WIDTH = 0", "if over threshold self.position = peak.position if libtype == 'ip':", "jp.center, #end = jp.center+1) #matrix = np.zeros((npeaks,len(peaksets)*2)) #i = 0", "columns are the original output from peakzilla and the remaining", "= float(score) self.chip = float(chip) self.control = float(control) self.fold_enrichment =", "= [] for set_name,peak in self.peaks.items(): if hasattr(peak,'score'): s +=", "self.peak_shift): self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while minus_tags and minus_tags[0] <= (peak.position +", "0 for row in r: if float(row[5]) >= 100 and", "else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s += '%.1f\\t%.1f\\t%.1f\\t' % (", "peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /= len(peaksets) # find closest peak to each", "jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse: #score 
+= minus_model[aln.pos-start] #else: #score += plus_model[aln.pos-start]", "called among all the ChIP experiments, Spread is the difference", "of old tags not fitting in the window any more", "#jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print jp #i += 1 '''", "self.set_name = set_name self.center = center class PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control,", "number of peaks that were called among all the ChIP", "#for jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #if set_name", "= center class PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name = set_name", "presence of a peak). Options: --max-distance=DIST maximum summit distance to", "0 self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.position = 0", "self.minus_mapq.popleft() # calculate normalized background level # add position to", "for jp in joined_peaks[chrom]: dist = jp.dist(peak) if dist >=", "if closest is None or closest.dist(peak) > dist: closest =", "s += '\\t'.join( htype + '_' + x for x", "['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES = set() def __init__(self,pzpeak): self.chrom = pzpeak.chrom self.peaks", "maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for row in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH", "float(distribution_score) self.fdr = float(fdr) def width(self): return self.end-self.start+1 class JoinedPeak(Peak):", "docopt import peakzilla_qnorm_mapq_patched as pz __doc__ = ''' Usage: join_peaks.py", "joined_peaks.items(): for jp in peaks: for j,set_name in enumerate(filesets.keys()): matrix[i][j]", "= collections.defaultdict(list) for set_name,peakset in peaksets.items(): for chrom,peaks in peakset.items():", "#pzpeak.center def 
can_add(self,pzpeak): return not pzpeak.set_name in self.peaks def add(self,pzpeak):", "and INPUT) as input to peakzilla. This will output a", "joined_peaks.items(): #for jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #if", "+ self.peak_shift): self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while minus_tags and minus_tags[0] <= (peak.position", "and minus_tags[0] <= (peak.position + self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) # get", "'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. All 'PZ' columns are the original output from", "% (self.chrom,self.center) @classmethod def header(cls): s = '\\t'.join(cls.HEADER) + '\\t'", "SlavePeak(Peak): def __init__(self,set_name,center): self.name = 'Slave' self.set_name = set_name self.center", "+ '\\t' return s def __str__(self): s = '' called_peaks", "collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '+')) minus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '-'))", "% ( #peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t' if hasattr(peak,'pzpeak'): s", "header(cls): s = '\\t'.join(cls.HEADER) + '\\t' #'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t' for htype in", "a peak). 
Options: --max-distance=DIST maximum summit distance to join peaks", "the biggest and smallest ChIP peak, ChipSE and EnrichSE are", "jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start = jp.center, #end = jp.center+1) #matrix = np.zeros((npeaks,len(peaksets)*2))", "= 0 #for chrom,peaks in joined_peaks.items(): #for jp in peaks:", "and EnrichSE are the standard error on the mean among", "Options: --max-distance=DIST maximum summit distance to join peaks [default: 10]", "def can_add(self,pzpeak): return not pzpeak.set_name in self.peaks def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name)", "the original output from peakzilla and the remaining columns are", "'\\t' + '\\t'.join('%.2f' % x for x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) +", "np.zeros((npeaks,len(peaksets)*2)) #i = 0 #for chrom,peaks in joined_peaks.items(): #for jp", "return getattr(self, '%s_file' % type) def get_tagcount(self,type): return getattr(self, '%s_tags'", "get_file(self,type): return getattr(self, '%s_file' % type) def get_tagcount(self,type): return getattr(self,", "= 0 joined_peaks = collections.defaultdict(list) for set_name,peakset in peaksets.items(): for", "in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) + '\\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s", "peaks: if set_name not in jp.peaks: jp.peaks[set_name] = SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name])", "jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1)", "all the ChIP experiments, Spread is the difference between the", "the peaks are output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. 
All 'PZ' columns are", "#XXX: limit peaks maxpeaks = 20 peakcounter = 0 for", "self.center = self.add(pzpeak) #pzpeak.center def can_add(self,pzpeak): return not pzpeak.set_name in", "= max(0,jp.center-JoinedPeak.WIDTH/2) #for aln in fileset.get_file(type).fetch( #reference = jp.chrom, start", "for peak in self.peaks[chrom]: # fill windows while plus_tags and", "for chrom,peaks in peakset.items(): for peak in peaks: closest =", "+ '_' + x for x in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] )", "peakzilla. For each ChIP experiment, input a PEAKS file as", "zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq) , sum(self.minus_mapq), len(self.plus_mapq) , len(self.minus_mapq) #print peak.mapq_score", "joined_peaks[chrom]: dist = jp.dist(peak) if dist >= 0 and dist", "1 joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak) plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset in", "self.peaks[pzpeak.set_name] = pzpeak return sum(p.center for p in self.peaks.values())/len(self.peaks) def", "#score += plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count( #reference = jp.chrom,", "+ '\\t' + '\\t'.join('%.2f' % x for x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr])", "= sum(l)/float(len(l)) variance = sum((x-mean)**2 for x in l)/(len(l)-1) return", "get rid of old tags not fitting in the window", "= int(end) self.name = name self.center = int(summit) self.score =", "rid of old tags not fitting in the window any", "sorted(self.peaks[chrom], lambda a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class FileSet(object):", "= 0 for row in r: if float(row[5]) >= 100", "/ 2 self.score_threshold = 10 self.plus_model = 
plus_model self.minus_model =", "def get_tagcount(self,type): return getattr(self, '%s_tags' % type) maxdist = int(args['--max-distance'])", "= pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print self.chip_tags, self.control_tags def get_file(self,type): return getattr(self,", "for htype in cls.HEADER_TYPES: s += '\\t'.join( htype + '_'", "def compute_fold_enrichment(self): self.computed_fold_enrichment = float(self.computed_chip )/self.computed_control class SlavePeak(Peak): def __init__(self,set_name,center):", "output regardless of the presence of a peak). Options: --max-distance=DIST", "enumerate(filesets.keys()): matrix[i][j] = float(jp.peaks[set_name].computed_chip) matrix[i][j+len(peaksets)] = float(jp.peaks[set_name].computed_control) i += 1", "int(end) self.name = name self.center = int(summit) self.score = float(score)", "self.peaks.items(): if hasattr(peak,'score'): s += peak.name + '\\t' + '\\t'.join('%.2f'", "if set_name not in jp.peaks: jp.peaks[set_name] = SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score)", "is None or not closest.can_add(peak): npeaks += 1 joined_peaks[chrom].append(JoinedPeak(peak)) else:", "abs(self.center-other.center) else: return -1 def compute_fold_enrichment(self): self.computed_fold_enrichment = float(self.computed_chip )/self.computed_control", "scorer = PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score = collections.defaultdict(list) for chrom,peaks in", "in the window any more while self.plus_window and self.plus_window[0] <", "__init__(self,set_name,center): self.name = 'Slave' self.set_name = set_name self.center = center", "peak_dict.items(): for jp in peaks: jp.pzpeak = pz.Peak() jp.pzpeak.size =", "type) maxdist = int(args['--max-distance']) peaksets = {} filesets = {}", "{} self.center = self.add(pzpeak) #pzpeak.center def can_add(self,pzpeak): return not pzpeak.set_name", 
"peakset.items(): for peak in peaks: closest = None for jp", "sys import math import csv import collections import docopt import", "std_err(peak_enrichs), ) + s else: s = '\\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\", "minus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '-')) minus_mapq = collections.deque(getattr(self,'%s_tags' %", "set() def __init__(self,pzpeak): self.chrom = pzpeak.chrom self.peaks = {} self.center", "ChipSE and EnrichSE are the standard error on the mean", "+= 1 #s += '%.1f\\t%.1f\\t%.1f\\t%.1f\\t' % ( #peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s", "self.peak_count = 0 self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.position", "< (peak.position - self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft() # calculate normalized background", "[] peak_enrichs = [] for set_name,peak in self.peaks.items(): if hasattr(peak,'score'):", "self.minus_window = collections.deque([]) self.plus_mapq = collections.deque([]) self.minus_mapq = collections.deque([]) for", "> maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for row in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row))", "tags not fitting in the window any more while self.plus_window", "peaks when there's no qualifying one npeaks = 0 joined_peaks", "= None for jp in joined_peaks[chrom]: dist = jp.dist(peak) if", "plus_model, minus_model): self.ip_tags = ip_tags self.control_tags = control_tags self.peak_size =", "self.ip_tags = ip_tags self.control_tags = control_tags self.peak_size = peak_size self.peak_shift", "{} for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name = os.path.basename(peakfile).split('.')[0] peaksets[set_name] =", "@classmethod def header(cls): s = '\\t'.join(cls.HEADER) + '\\t' #'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t' for", "= 
collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '-')) minus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom,", "#'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t' for htype in cls.HEADER_TYPES: s += '\\t'.join( htype +", "peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s += '%.1f\\t%.1f\\t%.1f\\t' %", "self.name = 'Slave' self.set_name = set_name self.center = center class", "libtype).get_mapq(chrom, '+')) minus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '-')) minus_mapq =", "peaksets = {} filesets = {} for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']):", "JoinedPeak.WIDTH /= len(peaksets) # find closest peak to each peak", "plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count( #reference = jp.chrom, #start =", "joined_peaks.items(): #for jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #control_coverage", "j,set_name in enumerate(peaksets.keys()): #if set_name not in jp.peaks: #jp.peaks[set_name] =", "common between multiple ChIP experiments determined by peakzilla. For each", "return math.sqrt(variance) def std_err(l): return stddev(l)/math.sqrt(len(l)) class Peak(object): def dist(self,other):", "s = '\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\t%.2f\\t%.2f\\t%.2f\\t' % ( max(peak_signals)/(min(peak_signals) +", "the peaks (Chromosome, Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). 
NPeaks signifies the number", "plus_tags and plus_tags[0] <= (peak.position + self.peak_shift): self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while", "#s += '\\t'.join([str(x) for x in #[peak.score,peak.chip,peak.fold_enrichment]]) try: if len(peak_signals):", "x in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) + '\\t' return s def", "self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) # get rid of old tags not fitting", "jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count += 1 for chrom,peaks in self.peaks.items(): self.peaks[chrom]", "+ '\\t'.join('%.2f' % x for x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\\t'", "= 20 peakcounter = 0 for row in r: if", "input to peakzilla. This will output a table with 3", "(CHIP and INPUT) as input to peakzilla. This will output", "= pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file = controlfile self.control_tags = pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True)", "if dist >= 0 and dist <= maxdist: if closest", "SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print JoinedPeak.header() for chrom,peaks in joined_peaks.items(): for", "''' i = 0 for chrom,peaks in joined_peaks.items(): for jp", "return abs(self.center-other.center) else: return -1 def compute_fold_enrichment(self): self.computed_fold_enrichment = float(self.computed_chip", "r: if float(row[5]) >= 100 and float(row[8]) >= 10: peakcounter", "0 joined_peaks = collections.defaultdict(list) for set_name,peakset in peaksets.items(): for chrom,peaks", "if peakcounter > maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for row in", "= collections.deque([]) for peak in self.peaks[chrom]: # fill windows while", "#score = 0 #start = max(0,jp.center-JoinedPeak.WIDTH/2) 
#for aln in fileset.get_file(type).fetch(", "(score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count( #reference = jp.chrom, #start = max(0,jp.center-JoinedPeak.WIDTH/2), #end", "not in jp.peaks: jp.peaks[set_name] = SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print JoinedPeak.header()", "multiple ChIP experiments determined by peakzilla. For each ChIP experiment,", "1 #quantile_normalize.quantile_norm(matrix) #i = 0 #for chrom,peaks in joined_peaks.items(): #for", "standard error on the mean among the ChIP and Enrich", "peaks are output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. All 'PZ' columns are the", "+= 'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t' if hasattr(peak,'pzpeak'): s += '\\t'.join('%.2f' % x for", "= 0 def fill_scores(self,chrom,libtype,scoretype): plus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '+'))", "'-')) minus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '-')) self.plus_window = collections.deque([])", "self.name = name self.center = int(summit) self.score = float(score) self.chip", "'ip': peak.mapq_score = float(sum(self.plus_mapq) + sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) + len(self.minus_mapq))) #if", "= float(fold_enrichment) self.distribution_score = float(distribution_score) self.fdr = float(fdr) def width(self):", "= '\\t'.join(cls.HEADER) + '\\t' #'#Chromosome\\tPosition\\tNPeaks\\tSpread\\t' for htype in cls.HEADER_TYPES: s", "jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #if set_name not", "def __init__(self, ip_tags, control_tags, peak_size, plus_model, minus_model): self.ip_tags = ip_tags", "math import csv import collections import docopt import peakzilla_qnorm_mapq_patched as", "FileSet(object): def __init__(self,peakfile,chipfile,controlfile): self.peakfile = peakfile self.chip_file = 
chipfile self.chip_tags", "jp in peaks: if set_name not in jp.peaks: jp.peaks[set_name] =", "= collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '+')) plus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom,", "self.peaks.items(): self.peaks[chrom] = sorted(self.peaks[chrom], lambda a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom)", "(PEAKS CHIP INPUT) ... ] This script finds peaks in", "and 2 BED files (CHIP and INPUT) as input to", "__str__(self): s = '' called_peaks = 0 peak_signals = []", "to region if over threshold self.position = peak.position if libtype", "summit distance to join peaks [default: 10] ''' args =", "#for j,set_name in enumerate(peaksets.keys()): #if set_name not in jp.peaks: #jp.peaks[set_name]", "= control_tags self.peak_size = peak_size self.peak_shift = (peak_size - 1)", "( max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs), ) + s else:", "chrom,peaks in peak_dict.items(): for jp in peaks: jp.pzpeak = pz.Peak()", "for j,set_name in enumerate(filesets.keys()): matrix[i][j] = float(jp.peaks[set_name].computed_chip) matrix[i][j+len(peaksets)] = float(jp.peaks[set_name].computed_control)", "+ sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs), ) + s else: s =", "peakzilla, and 2 BED files (CHIP and INPUT) as input", "getattr(self, '%s_tags' % type) maxdist = int(args['--max-distance']) peaksets = {}", "other.chrom: return abs(self.center-other.center) else: return -1 def compute_fold_enrichment(self): self.computed_fold_enrichment =", "and the remaining columns are re-calculated in this script (also", "peakcounter += 1 if peakcounter > maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) '''", "for chrom,peaks in joined_peaks.items(): for peak in peaks: print peak", "columns are re-calculated in this 
script (also output regardless of", "len(peak_signals): s = '\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\t%.2f\\t%.2f\\t%.2f\\t' % ( max(peak_signals)/(min(peak_signals)", "in joined_peaks.items(): for jp in peaks: if set_name not in", "i = 0 for chrom,peaks in joined_peaks.items(): for jp in", "= 0 for chrom,peaks in joined_peaks.items(): for jp in peaks:", "#set_name,matrix[i][j],matrix[i][j + len(peaksets)]) #else: #jp.peaks[set_name].computed_chip = matrix[i][j] #jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)]", "self.computed_fold_enrichment = float(self.computed_chip )/self.computed_control class SlavePeak(Peak): def __init__(self,set_name,center): self.name =", "jp in joined_peaks[chrom]: dist = jp.dist(peak) if dist >= 0", "over threshold self.position = peak.position if libtype == 'ip': peak.mapq_score", "= jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse: #score += minus_model[aln.pos-start] #else: #score +=", "<= (peak.position + self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) # get rid of", "SlavePeak( #set_name,matrix[i][j],matrix[i][j + len(peaksets)]) #else: #jp.peaks[set_name].computed_chip = matrix[i][j] #jp.peaks[set_name].computed_control =", "smallest ChIP peak, ChipSE and EnrichSE are the standard error", "called_peaks = 0 peak_signals = [] peak_enrichs = [] for", "None or closest.dist(peak) > dist: closest = jp if closest", "#start = max(0,jp.center-JoinedPeak.WIDTH/2), #end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start = jp.center, #end", "p in self.peaks.values())/len(self.peaks) def name(self): return '%s_%d' % (self.chrom,self.center) @classmethod", "int(args['--max-distance']) peaksets = {} filesets = {} for peakfile,chipfile,controlfile in", "''' #XXX: limit peaks maxpeaks = 20 peakcounter = 0", "def 
__init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name = set_name self.chrom = chrom self.start", "maxdist = int(args['--max-distance']) peaksets = {} filesets = {} for", "#def get_coverage(fileset,type,jp,pseudocount=0): #score = 0 #start = max(0,jp.center-JoinedPeak.WIDTH/2) #for aln", "the new set # make new peaks when there's no", "otuput by peakzilla, and 2 BED files (CHIP and INPUT)", "for x in #[peak.score,peak.chip,peak.fold_enrichment]]) try: if len(peak_signals): s = '\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2),", "width(self): return self.end-self.start+1 class JoinedPeak(Peak): WIDTH = 0 HEADER =", "x for x in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) + '\\t' return", "= plus_model self.minus_model = minus_model self.peaks = collections.defaultdict(list) self.peak_count =", "collections.defaultdict(list) for set_name,peakset in peaksets.items(): for chrom,peaks in peakset.items(): for", "the ChIP experiments, Spread is the difference between the biggest", "self.peaks def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name] = pzpeak return sum(p.center for", "#! 
/usr/bin/env python import os import sys import math import", "while plus_tags and plus_tags[0] <= (peak.position + self.peak_shift): self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft())", "sum(self.plus_mapq) , sum(self.minus_mapq), len(self.plus_mapq) , len(self.minus_mapq) #print peak.mapq_score setattr(peak,scoretype,self.calculate_score()) def", "x for x in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) + '\\t' peak_signals.append(peak.pzpeak.nrom_signal)", "j,set_name in enumerate(peaksets.keys()): #control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage = get_coverage(filesets[set_name],'chip',jp) #matrix[i][j]", "(peak_size - 1) / 2 self.score_threshold = 10 self.plus_model =", "collections.deque([]) self.minus_window = collections.deque([]) self.plus_mapq = collections.deque([]) self.minus_mapq = collections.deque([])", "= docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True) def stddev(l): mean = sum(l)/float(len(l)) variance =", "the remaining columns are re-calculated in this script (also output", "(peak.position + self.peak_shift): self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while minus_tags and minus_tags[0] <=", "== 'ip': peak.mapq_score = float(sum(self.plus_mapq) + sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) + len(self.minus_mapq)))", "len(self.minus_mapq))) #if peak.name == 'Peak_12869': #print zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq) #print", "peaks [default: 10] ''' args = docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True) def stddev(l):", "JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /= len(peaksets) # find closest peak", "% type) def get_tagcount(self,type): return getattr(self, '%s_tags' % type) maxdist", "All 'PZ' columns are the original 
output from peakzilla and", "peak #plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0): #score = 0 #start", "'\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\t%.2f\\t%.2f\\t%.2f\\t' % ( max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon), std_err(peak_signals),", "among all the ChIP experiments, Spread is the difference between", "+= 1 joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak) plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset", "0 #start = max(0,jp.center-JoinedPeak.WIDTH/2) #for aln in fileset.get_file(type).fetch( #reference =", "= pz.Peak() jp.pzpeak.size = self.peak_size jp.pzpeak.shift = self.peak_shift jp.pzpeak.position =", "def name(self): return '%s_%d' % (self.chrom,self.center) @classmethod def header(cls): s", "not pzpeak.set_name in self.peaks def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name] = pzpeak", "set_name,fileset in filesets.items(): scorer = PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score = collections.defaultdict(list)", "#end = jp.center+1) #matrix = np.zeros((npeaks,len(peaksets)*2)) #i = 0 #for", "import sys import math import csv import collections import docopt", "def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name] = pzpeak return sum(p.center for p", "+ s else: s = '\\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\tNA\\tNA\\tNA\\t' +", "region if over threshold self.position = peak.position if libtype ==", "peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name = os.path.basename(peakfile).split('.')[0] peaksets[set_name] = collections.defaultdict(list) filesets[set_name]", "of peaks that 
were called among all the ChIP experiments,", "#peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\tNA\\t' if hasattr(peak,'pzpeak'): s += '\\t'.join('%.2f'", "closest.add(peak) plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset in filesets.items(): scorer =", "= self.peak_shift jp.pzpeak.position = jp.center jp.pzpeak.name = jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count", "center class PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name = set_name self.chrom", "scorer.score_peaks(peaks_to_score) print JoinedPeak.header() for chrom,peaks in joined_peaks.items(): for peak in", "10] ''' args = docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True) def stddev(l): mean =", "try: if len(peak_signals): s = '\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\\ '\\t%.2f\\t%.2f\\t%.2f\\t' %", "each experinent \"X\", information about the peaks are output: 'XPZName','XPZScore',", "__init__(self,peakfile,chipfile,controlfile): self.peakfile = peakfile self.chip_file = chipfile self.chip_tags = pz.TagContainer(store_mapq=True)", "float(self.computed_chip )/self.computed_control class SlavePeak(Peak): def __init__(self,set_name,center): self.name = 'Slave' self.set_name", "self.fold_enrichment = float(fold_enrichment) self.distribution_score = float(distribution_score) self.fdr = float(fdr) def", "def fill_scores(self,chrom,libtype,scoretype): plus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '+')) plus_mapq =", "'+')) minus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '-')) minus_mapq = collections.deque(getattr(self,'%s_tags'", "dist: closest = jp if closest is None or not", "#matrix[i][j+len(peaksets)] = float(control_coverage) #i += 1 
#quantile_normalize.quantile_norm(matrix) #i = 0", "as otuput by peakzilla, and 2 BED files (CHIP and", "= float(control) self.fold_enrichment = float(fold_enrichment) self.distribution_score = float(distribution_score) self.fdr =", "joined_peaks = collections.defaultdict(list) for set_name,peakset in peaksets.items(): for chrom,peaks in", "the mean among the ChIP and Enrich values for the", "\"X\", information about the peaks are output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. All", "self.plus_window and self.plus_window[0] < (peak.position - self.peak_shift): self.plus_window.popleft() self.plus_mapq.popleft() while", "variance = sum((x-mean)**2 for x in l)/(len(l)-1) return math.sqrt(variance) def", "old tags not fitting in the window any more while", "peaks: for j,set_name in enumerate(filesets.keys()): matrix[i][j] = float(jp.peaks[set_name].computed_chip) matrix[i][j+len(peaksets)] =", "script (also output regardless of the presence of a peak).", "pz __doc__ = ''' Usage: join_peaks.py [options] PEAKS CHIP INPUT", "This will output a table with 3 columns identifying the", "+= '\\t'.join( htype + '_' + x for x in", "for peak in peaks: closest = None for jp in", "peaks: jp.pzpeak = pz.Peak() jp.pzpeak.size = self.peak_size jp.pzpeak.shift = self.peak_shift", "matrix[i][j] = float(jp.peaks[set_name].computed_chip) matrix[i][j+len(peaksets)] = float(jp.peaks[set_name].computed_control) i += 1 '''", "more while self.plus_window and self.plus_window[0] < (peak.position - self.peak_shift): self.plus_window.popleft()", "+= 1 if peakcounter > maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for", "are the standard error on the mean among the ChIP", "self.score = float(score) self.chip = float(chip) self.control = float(control) self.fold_enrichment", "Usage: join_peaks.py [options] PEAKS CHIP INPUT [ (PEAKS CHIP INPUT)", "= ip_tags self.control_tags = control_tags self.peak_size = 
peak_size self.peak_shift =", "class FileSet(object): def __init__(self,peakfile,chipfile,controlfile): self.peakfile = peakfile self.chip_file = chipfile", "self.peak_count += 1 for chrom,peaks in self.peaks.items(): self.peaks[chrom] = sorted(self.peaks[chrom],", "#matrix = np.zeros((npeaks,len(peaksets)*2)) #i = 0 #for chrom,peaks in joined_peaks.items():", "plus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '+')) minus_tags = collections.deque(getattr(self,'%s_tags' %", "libtype).get_tags(chrom, '+')) plus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '+')) minus_tags =", "+= peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /= len(peaksets) # find closest peak to", "npeaks = 0 joined_peaks = collections.defaultdict(list) for set_name,peakset in peaksets.items():", "peaks (Chromosome, Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). NPeaks signifies the number of", "peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for row in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width()", "dist(self,other): if self.chrom == other.chrom: return abs(self.center-other.center) else: return -1", "else: return -1 def compute_fold_enrichment(self): self.computed_fold_enrichment = float(self.computed_chip )/self.computed_control class", "args = docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True) def stddev(l): mean = sum(l)/float(len(l)) variance", "0 HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES = set() def __init__(self,pzpeak): self.chrom", "for x in l)/(len(l)-1) return math.sqrt(variance) def std_err(l): return stddev(l)/math.sqrt(len(l))", "float(score) self.chip = float(chip) self.control = float(control) self.fold_enrichment = float(fold_enrichment)", "row in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) 
JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /= len(peaksets)", "max(0,jp.center-JoinedPeak.WIDTH/2), #end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start = jp.center, #end = jp.center+1)", "files (CHIP and INPUT) as input to peakzilla. This will", "peak in self.peaks[chrom]: # fill windows while plus_tags and plus_tags[0]", "#chip_coverage = get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] = float(chip_coverage) #matrix[i][j+len(peaksets)] = float(control_coverage) #i", "[ (PEAKS CHIP INPUT) ... ] This script finds peaks", "= csv.reader(open(peakfile),delimiter='\\t') r.next() # header ''' #XXX: limit peaks maxpeaks", "= pzpeak.chrom self.peaks = {} self.center = self.add(pzpeak) #pzpeak.center def", "std_err(l): return stddev(l)/math.sqrt(len(l)) class Peak(object): def dist(self,other): if self.chrom ==", "set_name,peak in self.peaks.items(): if hasattr(peak,'score'): s += peak.name + '\\t'", "filesets.items(): scorer = PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score = collections.defaultdict(list) for chrom,peaks", "are the original output from peakzilla and the remaining columns", "any more while self.plus_window and self.plus_window[0] < (peak.position - self.peak_shift):", "= self.peak_size jp.pzpeak.shift = self.peak_shift jp.pzpeak.position = jp.center jp.pzpeak.name =", "None or not closest.can_add(peak): npeaks += 1 joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak)", "+ x for x in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) + '\\t'", "'NA\\tNA\\tNA\\tNA\\tNA\\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s += '%.1f\\t%.1f\\t%.1f\\t' % ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s +=", "__init__(self, ip_tags, control_tags, peak_size, plus_model, minus_model): self.ip_tags = ip_tags self.control_tags", 
"= jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count += 1 for chrom,peaks in self.peaks.items():", "in peaks: jp.pzpeak = pz.Peak() jp.pzpeak.size = self.peak_size jp.pzpeak.shift =", "#for j,set_name in enumerate(peaksets.keys()): #control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage = get_coverage(filesets[set_name],'chip',jp)", "0 and dist <= maxdist: if closest is None or", "peak.position if libtype == 'ip': peak.mapq_score = float(sum(self.plus_mapq) + sum(self.minus_mapq)", "sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs), ) + s else: s = '\\t'.join([self.chrom,str(self.center),", "= peak.position if libtype == 'ip': peak.mapq_score = float(sum(self.plus_mapq) +", "get_coverage(fileset,type,jp,pseudocount=0): #score = 0 #start = max(0,jp.center-JoinedPeak.WIDTH/2) #for aln in", "information about the peaks are output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. 
All 'PZ'", "for jp in peaks: jp.pzpeak = pz.Peak() jp.pzpeak.size = self.peak_size", "for set_name,fileset in filesets.items(): scorer = PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score =", "= collections.defaultdict(list) for chrom,peaks in joined_peaks.items(): for jp in peaks:", "jp.peaks: jp.peaks[set_name] = SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print JoinedPeak.header() for chrom,peaks", "#print self.chip_tags, self.control_tags def get_file(self,type): return getattr(self, '%s_file' % type)", "self.control_tags def get_file(self,type): return getattr(self, '%s_file' % type) def get_tagcount(self,type):", "std_err(peak_signals), std_err(peak_enrichs), ) + s else: s = '\\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)])", "= float(chip) self.control = float(control) self.fold_enrichment = float(fold_enrichment) self.distribution_score =", "of the presence of a peak). 
Options: --max-distance=DIST maximum summit", "= [] peak_enrichs = [] for set_name,peak in self.peaks.items(): if", "plus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '+')) plus_mapq = collections.deque(getattr(self,'%s_tags' %", "break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for row in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH +=", "closest peak to each peak in the new set #", "#if aln.is_reverse: #score += minus_model[aln.pos-start] #else: #score += plus_model[aln.pos-start] #return", "#return 10.**6*fileset.get_file(type).count( #reference = jp.chrom, #start = max(0,jp.center-JoinedPeak.WIDTH/2), #end =", "+= minus_model[aln.pos-start] #else: #score += plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count(", "peaks: #for j,set_name in enumerate(peaksets.keys()): #control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage =", "self.plus_window.popleft() self.plus_mapq.popleft() while self.minus_window and self.minus_window[0] < (peak.position - self.peak_shift):", "self.control = float(control) self.fold_enrichment = float(fold_enrichment) self.distribution_score = float(distribution_score) self.fdr", "x in #[peak.score,peak.chip,peak.fold_enrichment]]) try: if len(peak_signals): s = '\\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)])", "jp in peaks: for j,set_name in enumerate(filesets.keys()): matrix[i][j] = float(jp.peaks[set_name].computed_chip)", "in enumerate(peaksets.keys()): #control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage = get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] =", "get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] = float(chip_coverage) #matrix[i][j+len(peaksets)] = float(control_coverage) #i += 1", "- 
self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft() # calculate normalized background level #", "]) + '\\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s += 'NA\\tNA\\tNA\\tNA\\tNA\\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment", "cls.HEADER_TYPES: s += '\\t'.join( htype + '_' + x for", "collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '-')) minus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '-'))", "pzpeak.chrom self.peaks = {} self.center = self.add(pzpeak) #pzpeak.center def can_add(self,pzpeak):", "#jp.peaks[set_name].compute_fold_enrichment() #print jp #i += 1 ''' i = 0", "def stddev(l): mean = sum(l)/float(len(l)) variance = sum((x-mean)**2 for x", "the peaks. For each experinent \"X\", information about the peaks", "0 def fill_scores(self,chrom,libtype,scoretype): plus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '+')) plus_mapq", "os import sys import math import csv import collections import", "self.fdr = float(fdr) def width(self): return self.end-self.start+1 class JoinedPeak(Peak): WIDTH", "regardless of the presence of a peak). 
Options: --max-distance=DIST maximum", "for x in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) + '\\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment)", "peak in peaks: closest = None for jp in joined_peaks[chrom]:", "in peaks: for j,set_name in enumerate(filesets.keys()): matrix[i][j] = float(jp.peaks[set_name].computed_chip) matrix[i][j+len(peaksets)]", "minus_model self.peaks = collections.defaultdict(list) self.peak_count = 0 self.plus_window = collections.deque([])", "= float(control_coverage) #i += 1 #quantile_normalize.quantile_norm(matrix) #i = 0 #for", "self.position = 0 def fill_scores(self,chrom,libtype,scoretype): plus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom," ]
[ "'method': 'UPDATE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname': 'read", "'create' in view_cls.crud_method_names and hasattr(view_cls, 'create'): create_op = { 'method':", "\"responseModel\": \"Error\" }, { 'code': 400, \"message\": \"form_invalid\", \"responseModel\": \"Error\"", "django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly class", "== \"\": continue if '{}' in path: path = path.replace('{}',", "\"name\": each[0], \"dataType\": 'float', \"format\": 'float', \"required\": True, } )", "http_json_response from django_town.cache.utlis import SimpleCache from django_town.oauth2.swagger import swagger_authorizations_data from", "{ 'code': 409, \"message\": \"method_not_allowed\", \"responseModel\": \"Error\" }, { 'code':", "view_cls.crud_method_names and hasattr(view_cls, 'create'): create_op = { 'method': 'POST', 'parameters':", "\"responseModel\": \"Error\" }, { 'code': 409, \"message\": \"conflict\", \"responseModel\": \"Error\"", "'description': manager.description, 'apiVersion': manager.api_version, 'swaggerVersion': \"1.2\", 'basePath': manager.base_url, 'resourcePath': manager.base_url,", "}, ] current_api = { 'path': path, 'description': view_cls.__doc__, }", "'int64', \"required\": True, } ) responseMessages = [ { 'code':", "path, } operations.append(op) current_api['operations'] = operations apis.append(current_api) ret['apis'] = apis", "\"message\": \"unauthorized\", \"responseModel\": \"Error\" }, { 'code': 400, \"message\": \"form_invalid\",", "\"message\": \"method_not_allowed\", \"responseModel\": \"Error\" }, { 'code': 409, \"message\": \"conflict\",", "{ \"type\": \"string\" } } } } for view_cls in", "= path.replace('{}', '{pk}') global_params.append( { \"paramType\": \"path\", \"name\": 'pk', \"description\":", "= [] path = view_cls.path() if path == \"\": 
continue", "'swaggerVersion': \"1.2\", 'basePath': manager.base_url, 'resourcePath': manager.base_url, 'info': manager.info, 'authorizations': swagger_authorizations_data()}", "and hasattr(view_cls, 'read'): op = { 'method': 'GET', 'responseMessages': responseMessages,", "\"required\": True, } ) else: params.append( { \"paramType\": \"query\", \"name\":", "\"Error\" }, ] current_api = { 'path': path, 'description': view_cls.__doc__,", "responseMessages, 'nickname': 'create ' + path, } operations.append(create_op) if 'read'", "{ 'code': 400, \"message\": \"form_invalid\", \"responseModel\": \"Error\" }, { 'code':", "\"responseModel\": \"Error\" }, { 'code': 401, \"message\": \"permission_denied\", \"responseModel\": \"Error\"", "'responseMessages': responseMessages, 'errorResponses': [], 'nickname': 'read ' + path, }", "if each[1] == int: params.append( { \"paramType\": \"query\", \"name\": each[0],", "= http_json_response(ret) response[\"Access-Control-Allow-Origin\"] = \"*\" response[\"Access-Control-Allow-Methods\"] = \"GET\" response[\"Access-Control-Max-Age\"] =", "\"permission_denied\", \"responseModel\": \"Error\" }, { 'code': 401, \"message\": \"unauthorized\", \"responseModel\":", "400, \"message\": \"bad_request\", \"responseModel\": \"Error\" }, ] current_api = {", "}, \"message\": { \"type\": \"string\" }, \"resource\": { \"type\": \"string\"", "= { 'method': 'POST', 'parameters': global_params, 'responseMessages': responseMessages, 'nickname': 'create", "\"paramType\": \"query\", \"name\": 'access_token', \"dataType\": 'string', \"required\": True, } )", "django_town.http import http_json_response from django_town.cache.utlis import SimpleCache from django_town.oauth2.swagger import", "}, { 'code': 500, \"message\": \"internal_error\", \"responseModel\": \"Error\" }, {", "[ { 'code': 404, \"message\": \"not_found\", \"responseModel\": \"Error\" }, {", "* 24, load_callback=load_cache).get(api_version=api_version) response = http_json_response(ret) 
response[\"Access-Control-Allow-Origin\"] = \"*\" response[\"Access-Control-Allow-Methods\"]", "\"path\", \"name\": 'pk', \"description\": 'primary key for object', \"dataType\": 'integer',", "\"description\": 'primary key for object', \"dataType\": 'integer', \"format\": 'int64', \"required\":", "path, } operations.append(create_op) if 'read' in view_cls.crud_method_names and hasattr(view_cls, 'read'):", "duration=60 * 60 * 24, load_callback=load_cache).get(api_version=api_version) response = http_json_response(ret) response[\"Access-Control-Allow-Origin\"]", "\"type\": \"string\" }, \"message\": { \"type\": \"string\" }, \"resource\": {", "'nickname': 'read ' + path, } operations.append(op) if 'delete' in", "'read_safe_parameters'): for each in view_cls.read_safe_parameters: if isinstance(each, tuple): if each[1]", "responseMessages, 'errorResponses': [], 'nickname': 'read ' + path, } operations.append(op)", "swagger_authorizations_data from django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from django_town.social.permissions import Authenticated,", "\"responseModel\": \"Error\" }, { 'code': 400, \"message\": \"bad_request\", \"responseModel\": \"Error\"", "{ \"error\": { \"type\": \"string\" }, \"field\": { \"type\": \"string\"", "path, 'description': view_cls.__doc__, } operations = [] if 'create' in", "ret['apis'] = apis ret[\"models\"] = models return ret ret =", "\"Error\", \"required\": ['error'], \"properties\": { \"error\": { \"type\": \"string\" },", "global_params.copy() for each_permission in view_cls.permission_classes: if issubclass(each_permission, OAuth2Authenticated): params.append( {", "from django_town.cache.utlis import SimpleCache from django_town.oauth2.swagger import swagger_authorizations_data from django_town.social.oauth2.permissions", "'responseMessages': responseMessages, 'nickname': 'create ' + path, } operations.append(create_op) if", "\"paramType\": \"query\", \"name\": each[0], 
\"dataType\": 'float', \"format\": 'float', \"required\": True,", "load_callback=load_cache).get(api_version=api_version) response = http_json_response(ret) response[\"Access-Control-Allow-Origin\"] = \"*\" response[\"Access-Control-Allow-Methods\"] = \"GET\"", "} } } for view_cls in manager.api_list: operations = []", "== int: params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'int',", "'update' in view_cls.crud_method_names and hasattr(view_cls, 'update'): op = { 'method':", "path = path.replace('{}', '{pk}') global_params.append( { \"paramType\": \"path\", \"name\": 'pk',", "if 'update' in view_cls.crud_method_names and hasattr(view_cls, 'update'): op = {", "op['parameters'] = params operations.append(op) if 'update' in view_cls.crud_method_names and hasattr(view_cls,", "if 'create' in view_cls.crud_method_names and hasattr(view_cls, 'create'): create_op = {", "\"responseModel\": \"Error\" }, { 'code': 409, \"message\": \"method_not_allowed\", \"responseModel\": \"Error\"", "current_api = { 'path': path, 'description': view_cls.__doc__, } operations =", "'path': path, 'description': view_cls.__doc__, } operations = [] if 'create'", "'responseMessages': responseMessages, 'nickname': 'read ' + path } params =", "class ApiDocsView(RestApiView): def read(self, request, api_version): def load_cache(api_version=\"alpha\"): manager =", "}, { 'code': 409, \"message\": \"conflict\", \"responseModel\": \"Error\" }, {", "operations.append(op) current_api['operations'] = operations apis.append(current_api) ret['apis'] = apis ret[\"models\"] =", "and hasattr(view_cls, 'delete'): op = { 'method': 'DELETE', 'parameters': global_params,", "\"paramType\": \"query\", \"name\": each[0], \"dataType\": 'string', \"required\": True, } )", "\"resource\": { \"type\": \"string\" } } } } for view_cls", "{ 'code': 404, \"message\": \"not_found\", \"responseModel\": \"Error\" }, { 'code':", "swagger_authorizations_data()} apis = [] models = { \"Error\": { \"id\":", 
"* 60 * 24, load_callback=load_cache).get(api_version=api_version) response = http_json_response(ret) response[\"Access-Control-Allow-Origin\"] =", "'code': 400, \"message\": \"bad_request\", \"responseModel\": \"Error\" }, ] current_api =", "responseMessages, 'nickname': 'read ' + path } params = global_params.copy()", "op = { 'method': 'GET', 'responseMessages': responseMessages, 'nickname': 'read '", "global_params.append( { \"paramType\": \"path\", \"name\": 'pk', \"description\": 'primary key for", "{ \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'string', \"required\": True, }", "'create ' + path, } operations.append(create_op) if 'read' in view_cls.crud_method_names", "'nickname': 'read ' + path } params = global_params.copy() for", "'errorResponses': [], 'nickname': 'read ' + path, } operations.append(op) if", "} } for view_cls in manager.api_list: operations = [] global_params", "manager.description, 'apiVersion': manager.api_version, 'swaggerVersion': \"1.2\", 'basePath': manager.base_url, 'resourcePath': manager.base_url, 'info':", "if issubclass(each_permission, OAuth2Authenticated): params.append( { \"paramType\": \"query\", \"name\": 'access_token', \"dataType\":", "'{}' in path: path = path.replace('{}', '{pk}') global_params.append( { \"paramType\":", "create_op = { 'method': 'POST', 'parameters': global_params, 'responseMessages': responseMessages, 'nickname':", "\"dataType\": 'int', \"format\": 'int64', \"required\": True, } ) elif each[1]", "if 'delete' in view_cls.crud_method_names and hasattr(view_cls, 'delete'): op = {", "\"required\": True, } ) if hasattr(view_cls, 'read_safe_parameters'): for each in", "\"method_not_allowed\", \"responseModel\": \"Error\" }, { 'code': 409, \"message\": \"conflict\", \"responseModel\":", "\"responseModel\": \"Error\" }, { 'code': 500, \"message\": \"internal_error\", \"responseModel\": \"Error\"", "each[0], \"dataType\": 'string', \"required\": True, } ) else: params.append( {", 
"AuthenticatedOrReadOnly class ApiDocsView(RestApiView): def read(self, request, api_version): def load_cache(api_version=\"alpha\"): manager", "key for object', \"dataType\": 'integer', \"format\": 'int64', \"required\": True, }", "\"not_found\", \"responseModel\": \"Error\" }, { 'code': 500, \"message\": \"internal_error\", \"responseModel\":", "params.append( { \"paramType\": \"query\", \"name\": 'access_token', \"dataType\": 'string', \"required\": True,", "'code': 403, \"message\": \"forbidden\", \"responseModel\": \"Error\" }, { 'code': 401,", "\"format\": 'int64', \"required\": True, } ) responseMessages = [ {", "\"form_invalid\", \"responseModel\": \"Error\" }, { 'code': 400, \"message\": \"form_required\", \"responseModel\":", "= { 'method': 'GET', 'responseMessages': responseMessages, 'nickname': 'read ' +", "<filename>django_town/rest_swagger/views.py from django_town.rest import RestApiView, rest_api_manager from django_town.http import http_json_response", "in path: path = path.replace('{}', '{pk}') global_params.append( { \"paramType\": \"path\",", "\"required\": ['error'], \"properties\": { \"error\": { \"type\": \"string\" }, \"field\":", "{ \"id\": \"Error\", \"required\": ['error'], \"properties\": { \"error\": { \"type\":", "{'title': manager.name, 'description': manager.description, 'apiVersion': manager.api_version, 'swaggerVersion': \"1.2\", 'basePath': manager.base_url,", "from django_town.rest import RestApiView, rest_api_manager from django_town.http import http_json_response from", "= [ { 'code': 404, \"message\": \"not_found\", \"responseModel\": \"Error\" },", "in manager.api_list: operations = [] global_params = [] path =", "in view_cls.crud_method_names and hasattr(view_cls, 'update'): op = { 'method': 'UPDATE',", "path } params = global_params.copy() for each_permission in view_cls.permission_classes: if", "'DELETE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname': 'read '", 
"operations.append(op) if 'update' in view_cls.crud_method_names and hasattr(view_cls, 'update'): op =", "for each_permission in view_cls.permission_classes: if issubclass(each_permission, OAuth2Authenticated): params.append( { \"paramType\":", "\"internal_error\", \"responseModel\": \"Error\" }, { 'code': 409, \"message\": \"method_not_allowed\", \"responseModel\":", "'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname': 'read ' +", "'{pk}') global_params.append( { \"paramType\": \"path\", \"name\": 'pk', \"description\": 'primary key", "if 'read' in view_cls.crud_method_names and hasattr(view_cls, 'read'): op = {", "each in view_cls.read_safe_parameters: if isinstance(each, tuple): if each[1] == int:", "each[0], \"dataType\": 'int', \"format\": 'int64', \"required\": True, } ) elif", "\"string\" }, \"field\": { \"type\": \"string\" }, \"message\": { \"type\":", "in view_cls.permission_classes: if issubclass(each_permission, OAuth2Authenticated): params.append( { \"paramType\": \"query\", \"name\":", "import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly class ApiDocsView(RestApiView):", "import Authenticated, AuthenticatedOrReadOnly class ApiDocsView(RestApiView): def read(self, request, api_version): def", "'code': 404, \"message\": \"not_found\", \"responseModel\": \"Error\" }, { 'code': 500,", "} ) else: params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\":", "manager.base_url, 'resourcePath': manager.base_url, 'info': manager.info, 'authorizations': swagger_authorizations_data()} apis = []", "\"properties\": { \"error\": { \"type\": \"string\" }, \"field\": { \"type\":", "global_params = [] path = view_cls.path() if path == \"\":", "params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'string', \"required\": True,", "' + path } params = global_params.copy() for each_permission in", 
"'method': 'GET', 'responseMessages': responseMessages, 'nickname': 'read ' + path }", "== float: params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'float',", "http_json_response(ret) response[\"Access-Control-Allow-Origin\"] = \"*\" response[\"Access-Control-Allow-Methods\"] = \"GET\" response[\"Access-Control-Max-Age\"] = \"1000\"", "409, \"message\": \"conflict\", \"responseModel\": \"Error\" }, { 'code': 403, \"message\":", "\"responseModel\": \"Error\" }, ] current_api = { 'path': path, 'description':", "'float', \"format\": 'float', \"required\": True, } ) else: params.append( {", "'authorizations': swagger_authorizations_data()} apis = [] models = { \"Error\": {", "hasattr(view_cls, 'create'): create_op = { 'method': 'POST', 'parameters': global_params, 'responseMessages':", "= [] global_params = [] path = view_cls.path() if path", "if hasattr(view_cls, 'read_safe_parameters'): for each in view_cls.read_safe_parameters: if isinstance(each, tuple):", "'update'): op = { 'method': 'UPDATE', 'parameters': global_params, 'responseMessages': responseMessages,", "for object', \"dataType\": 'integer', \"format\": 'int64', \"required\": True, } )", "isinstance(each, tuple): if each[1] == int: params.append( { \"paramType\": \"query\",", "404, \"message\": \"not_found\", \"responseModel\": \"Error\" }, { 'code': 500, \"message\":", "} ) if hasattr(view_cls, 'read_safe_parameters'): for each in view_cls.read_safe_parameters: if", "path.replace('{}', '{pk}') global_params.append( { \"paramType\": \"path\", \"name\": 'pk', \"description\": 'primary", "{ \"type\": \"string\" }, \"resource\": { \"type\": \"string\" } }", "else: params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'string', \"required\":", "for view_cls in manager.api_list: operations = [] global_params = []", "'method': 'DELETE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname': 'read", "each[1] == float: 
params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\":", "}, { 'code': 403, \"message\": \"forbidden\", \"responseModel\": \"Error\" }, {", "'code': 401, \"message\": \"permission_denied\", \"responseModel\": \"Error\" }, { 'code': 401,", "manager.info, 'authorizations': swagger_authorizations_data()} apis = [] models = { \"Error\":", "each_permission in view_cls.permission_classes: if issubclass(each_permission, OAuth2Authenticated): params.append( { \"paramType\": \"query\",", "\"message\": \"bad_request\", \"responseModel\": \"Error\" }, ] current_api = { 'path':", "\"type\": \"string\" }, \"resource\": { \"type\": \"string\" } } }", "RestApiView, rest_api_manager from django_town.http import http_json_response from django_town.cache.utlis import SimpleCache", "and hasattr(view_cls, 'create'): create_op = { 'method': 'POST', 'parameters': global_params,", "{ \"paramType\": \"query\", \"name\": each, \"dataType\": 'string', \"required\": True, }", "from django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly", "'primary key for object', \"dataType\": 'integer', \"format\": 'int64', \"required\": True,", "path: path = path.replace('{}', '{pk}') global_params.append( { \"paramType\": \"path\", \"name\":", "\"*\" response[\"Access-Control-Allow-Methods\"] = \"GET\" response[\"Access-Control-Max-Age\"] = \"1000\" response[\"Access-Control-Allow-Headers\"] = \"*\"", "} operations = [] if 'create' in view_cls.crud_method_names and hasattr(view_cls,", "{ 'code': 500, \"message\": \"internal_error\", \"responseModel\": \"Error\" }, { 'code':", "'code': 401, \"message\": \"unauthorized\", \"responseModel\": \"Error\" }, { 'code': 400,", ") elif each[1] == float: params.append( { \"paramType\": \"query\", \"name\":", "'string', \"required\": True, } ) if hasattr(view_cls, 'read_safe_parameters'): for each", "\"name\": each, 
\"dataType\": 'string', \"required\": True, } ) pass pass", "= {'title': manager.name, 'description': manager.description, 'apiVersion': manager.api_version, 'swaggerVersion': \"1.2\", 'basePath':", "\"dataType\": 'float', \"format\": 'float', \"required\": True, } ) else: params.append(", "{ 'code': 401, \"message\": \"permission_denied\", \"responseModel\": \"Error\" }, { 'code':", "each, \"dataType\": 'string', \"required\": True, } ) pass pass op['parameters']", "= SimpleCache(key_format=\"api-doc:%(api_version)s\", duration=60 * 60 * 24, load_callback=load_cache).get(api_version=api_version) response =", "import SimpleCache from django_town.oauth2.swagger import swagger_authorizations_data from django_town.social.oauth2.permissions import OAuth2Authenticated,", "in view_cls.read_safe_parameters: if isinstance(each, tuple): if each[1] == int: params.append(", "'code': 409, \"message\": \"conflict\", \"responseModel\": \"Error\" }, { 'code': 403,", "{ 'code': 401, \"message\": \"unauthorized\", \"responseModel\": \"Error\" }, { 'code':", "'POST', 'parameters': global_params, 'responseMessages': responseMessages, 'nickname': 'create ' + path,", "OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly class ApiDocsView(RestApiView): def", "\"responseModel\": \"Error\" }, { 'code': 401, \"message\": \"unauthorized\", \"responseModel\": \"Error\"", "hasattr(view_cls, 'read_safe_parameters'): for each in view_cls.read_safe_parameters: if isinstance(each, tuple): if", "\"string\" }, \"resource\": { \"type\": \"string\" } } } }", "{ \"type\": \"string\" }, \"field\": { \"type\": \"string\" }, \"message\":", "django_town.cache.utlis import SimpleCache from django_town.oauth2.swagger import swagger_authorizations_data from django_town.social.oauth2.permissions import", "from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly class ApiDocsView(RestApiView): def 
read(self, request,", "\"Error\" }, { 'code': 400, \"message\": \"form_invalid\", \"responseModel\": \"Error\" },", "'GET', 'responseMessages': responseMessages, 'nickname': 'read ' + path } params", "apis = [] models = { \"Error\": { \"id\": \"Error\",", "response[\"Access-Control-Allow-Methods\"] = \"GET\" response[\"Access-Control-Max-Age\"] = \"1000\" response[\"Access-Control-Allow-Headers\"] = \"*\" return", "}, { 'code': 401, \"message\": \"unauthorized\", \"responseModel\": \"Error\" }, {", ") else: params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'string',", "True, } ) responseMessages = [ { 'code': 404, \"message\":", "\"format\": 'int64', \"required\": True, } ) elif each[1] == float:", "{ 'method': 'GET', 'responseMessages': responseMessages, 'nickname': 'read ' + path", "'read' in view_cls.crud_method_names and hasattr(view_cls, 'read'): op = { 'method':", "= rest_api_manager(api_version) ret = {'title': manager.name, 'description': manager.description, 'apiVersion': manager.api_version,", "in view_cls.crud_method_names and hasattr(view_cls, 'create'): create_op = { 'method': 'POST',", "apis.append(current_api) ret['apis'] = apis ret[\"models\"] = models return ret ret", "django_town.oauth2.swagger import swagger_authorizations_data from django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from django_town.social.permissions", "response[\"Access-Control-Allow-Origin\"] = \"*\" response[\"Access-Control-Allow-Methods\"] = \"GET\" response[\"Access-Control-Max-Age\"] = \"1000\" response[\"Access-Control-Allow-Headers\"]", "[], 'nickname': 'read ' + path, } operations.append(op) current_api['operations'] =", "'code': 500, \"message\": \"internal_error\", \"responseModel\": \"Error\" }, { 'code': 409,", "\"type\": \"string\" } } } } for view_cls in manager.api_list:", "import http_json_response from django_town.cache.utlis import SimpleCache from django_town.oauth2.swagger import 
swagger_authorizations_data", "+ path } params = global_params.copy() for each_permission in view_cls.permission_classes:", "rest_api_manager from django_town.http import http_json_response from django_town.cache.utlis import SimpleCache from", "'read ' + path } params = global_params.copy() for each_permission", "tuple): if each[1] == int: params.append( { \"paramType\": \"query\", \"name\":", "60 * 24, load_callback=load_cache).get(api_version=api_version) response = http_json_response(ret) response[\"Access-Control-Allow-Origin\"] = \"*\"", "\"1.2\", 'basePath': manager.base_url, 'resourcePath': manager.base_url, 'info': manager.info, 'authorizations': swagger_authorizations_data()} apis", "'delete'): op = { 'method': 'DELETE', 'parameters': global_params, 'responseMessages': responseMessages,", "\"message\": { \"type\": \"string\" }, \"resource\": { \"type\": \"string\" }", "view_cls.read_safe_parameters: if isinstance(each, tuple): if each[1] == int: params.append( {", "path = view_cls.path() if path == \"\": continue if '{}'", "{ 'method': 'UPDATE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname':", "\"required\": True, } ) responseMessages = [ { 'code': 404,", "\"dataType\": 'integer', \"format\": 'int64', \"required\": True, } ) responseMessages =", "= [] if 'create' in view_cls.crud_method_names and hasattr(view_cls, 'create'): create_op", "\"required\": True, } ) elif each[1] == float: params.append( {", "= { 'method': 'DELETE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [],", "'integer', \"format\": 'int64', \"required\": True, } ) responseMessages = [", "\"responseModel\": \"Error\" }, { 'code': 403, \"message\": \"forbidden\", \"responseModel\": \"Error\"", "OAuth2AuthenticatedOrReadOnly from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly class ApiDocsView(RestApiView): def read(self,", "\"Error\" }, { 'code': 401, \"message\": 
\"unauthorized\", \"responseModel\": \"Error\" },", "load_cache(api_version=\"alpha\"): manager = rest_api_manager(api_version) ret = {'title': manager.name, 'description': manager.description,", "\"message\": \"form_invalid\", \"responseModel\": \"Error\" }, { 'code': 400, \"message\": \"form_required\",", "if '{}' in path: path = path.replace('{}', '{pk}') global_params.append( {", "Authenticated, AuthenticatedOrReadOnly class ApiDocsView(RestApiView): def read(self, request, api_version): def load_cache(api_version=\"alpha\"):", "{ 'code': 409, \"message\": \"conflict\", \"responseModel\": \"Error\" }, { 'code':", "models return ret ret = SimpleCache(key_format=\"api-doc:%(api_version)s\", duration=60 * 60 *", "path, } operations.append(op) if 'delete' in view_cls.crud_method_names and hasattr(view_cls, 'delete'):", "\"error\": { \"type\": \"string\" }, \"field\": { \"type\": \"string\" },", "\"query\", \"name\": each, \"dataType\": 'string', \"required\": True, } ) pass", "401, \"message\": \"permission_denied\", \"responseModel\": \"Error\" }, { 'code': 401, \"message\":", "params.append( { \"paramType\": \"query\", \"name\": each, \"dataType\": 'string', \"required\": True,", "['error'], \"properties\": { \"error\": { \"type\": \"string\" }, \"field\": {", "'parameters': global_params, 'responseMessages': responseMessages, 'nickname': 'create ' + path, }", "'errorResponses': [], 'nickname': 'read ' + path, } operations.append(op) current_api['operations']", "+ path, } operations.append(op) current_api['operations'] = operations apis.append(current_api) ret['apis'] =", "ret ret = SimpleCache(key_format=\"api-doc:%(api_version)s\", duration=60 * 60 * 24, load_callback=load_cache).get(api_version=api_version)", "ret = {'title': manager.name, 'description': manager.description, 'apiVersion': manager.api_version, 'swaggerVersion': \"1.2\",", "[] models = { \"Error\": { \"id\": \"Error\", \"required\": ['error'],", "def load_cache(api_version=\"alpha\"): manager = 
rest_api_manager(api_version) ret = {'title': manager.name, 'description':", "params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'int', \"format\": 'int64',", "in view_cls.crud_method_names and hasattr(view_cls, 'delete'): op = { 'method': 'DELETE',", "object', \"dataType\": 'integer', \"format\": 'int64', \"required\": True, } ) responseMessages", "pass op['parameters'] = params operations.append(op) if 'update' in view_cls.crud_method_names and", "}, { 'code': 409, \"message\": \"method_not_allowed\", \"responseModel\": \"Error\" }, {", "\"Error\" }, { 'code': 403, \"message\": \"forbidden\", \"responseModel\": \"Error\" },", "} operations.append(op) if 'delete' in view_cls.crud_method_names and hasattr(view_cls, 'delete'): op", "[] global_params = [] path = view_cls.path() if path ==", "continue if '{}' in path: path = path.replace('{}', '{pk}') global_params.append(", "\"Error\" }, { 'code': 400, \"message\": \"bad_request\", \"responseModel\": \"Error\" },", "'nickname': 'read ' + path, } operations.append(op) current_api['operations'] = operations", "400, \"message\": \"form_required\", \"responseModel\": \"Error\" }, { 'code': 400, \"message\":", "\"Error\" }, { 'code': 400, \"message\": \"form_required\", \"responseModel\": \"Error\" },", "\"string\" } } } } for view_cls in manager.api_list: operations", "] current_api = { 'path': path, 'description': view_cls.__doc__, } operations", "\"Error\": { \"id\": \"Error\", \"required\": ['error'], \"properties\": { \"error\": {", "return ret ret = SimpleCache(key_format=\"api-doc:%(api_version)s\", duration=60 * 60 * 24,", "each[1] == int: params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\":", "} params = global_params.copy() for each_permission in view_cls.permission_classes: if issubclass(each_permission,", "SimpleCache from django_town.oauth2.swagger import swagger_authorizations_data from django_town.social.oauth2.permissions import OAuth2Authenticated, 
OAuth2AuthenticatedOrReadOnly", "hasattr(view_cls, 'read'): op = { 'method': 'GET', 'responseMessages': responseMessages, 'nickname':", "'string', \"required\": True, } ) pass pass op['parameters'] = params", "'float', \"required\": True, } ) else: params.append( { \"paramType\": \"query\",", "'description': view_cls.__doc__, } operations = [] if 'create' in view_cls.crud_method_names", "apis ret[\"models\"] = models return ret ret = SimpleCache(key_format=\"api-doc:%(api_version)s\", duration=60", "django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly class ApiDocsView(RestApiView): def read(self, request, api_version):", "response = http_json_response(ret) response[\"Access-Control-Allow-Origin\"] = \"*\" response[\"Access-Control-Allow-Methods\"] = \"GET\" response[\"Access-Control-Max-Age\"]", "\"field\": { \"type\": \"string\" }, \"message\": { \"type\": \"string\" },", "'read'): op = { 'method': 'GET', 'responseMessages': responseMessages, 'nickname': 'read", "'resourcePath': manager.base_url, 'info': manager.info, 'authorizations': swagger_authorizations_data()} apis = [] models", "\"message\": \"conflict\", \"responseModel\": \"Error\" }, { 'code': 403, \"message\": \"forbidden\",", "+ path, } operations.append(op) if 'delete' in view_cls.crud_method_names and hasattr(view_cls,", "if isinstance(each, tuple): if each[1] == int: params.append( { \"paramType\":", "= [] models = { \"Error\": { \"id\": \"Error\", \"required\":", "\"paramType\": \"path\", \"name\": 'pk', \"description\": 'primary key for object', \"dataType\":", "'create'): create_op = { 'method': 'POST', 'parameters': global_params, 'responseMessages': responseMessages,", "ret = SimpleCache(key_format=\"api-doc:%(api_version)s\", duration=60 * 60 * 24, load_callback=load_cache).get(api_version=api_version) response", "view_cls.permission_classes: if issubclass(each_permission, OAuth2Authenticated): params.append( { \"paramType\": \"query\", \"name\": 'access_token',", 
"path == \"\": continue if '{}' in path: path =", "= view_cls.path() if path == \"\": continue if '{}' in", "} operations.append(create_op) if 'read' in view_cls.crud_method_names and hasattr(view_cls, 'read'): op", "\"responseModel\": \"Error\" }, { 'code': 400, \"message\": \"form_required\", \"responseModel\": \"Error\"", "read(self, request, api_version): def load_cache(api_version=\"alpha\"): manager = rest_api_manager(api_version) ret =", "request, api_version): def load_cache(api_version=\"alpha\"): manager = rest_api_manager(api_version) ret = {'title':", "view_cls.crud_method_names and hasattr(view_cls, 'delete'): op = { 'method': 'DELETE', 'parameters':", "\"message\": \"permission_denied\", \"responseModel\": \"Error\" }, { 'code': 401, \"message\": \"unauthorized\",", "' + path, } operations.append(op) if 'delete' in view_cls.crud_method_names and", "'code': 400, \"message\": \"form_required\", \"responseModel\": \"Error\" }, { 'code': 400,", "float: params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'float', \"format\":", "[], 'nickname': 'read ' + path, } operations.append(op) if 'delete'", "\"Error\" }, { 'code': 500, \"message\": \"internal_error\", \"responseModel\": \"Error\" },", "24, load_callback=load_cache).get(api_version=api_version) response = http_json_response(ret) response[\"Access-Control-Allow-Origin\"] = \"*\" response[\"Access-Control-Allow-Methods\"] =", "\"message\": \"form_required\", \"responseModel\": \"Error\" }, { 'code': 400, \"message\": \"bad_request\",", "'basePath': manager.base_url, 'resourcePath': manager.base_url, 'info': manager.info, 'authorizations': swagger_authorizations_data()} apis =", "\"paramType\": \"query\", \"name\": each, \"dataType\": 'string', \"required\": True, } )", "django_town.rest import RestApiView, rest_api_manager from django_town.http import http_json_response from django_town.cache.utlis", "rest_api_manager(api_version) ret = {'title': manager.name, 'description': 
manager.description, 'apiVersion': manager.api_version, 'swaggerVersion':", "\"query\", \"name\": each[0], \"dataType\": 'int', \"format\": 'int64', \"required\": True, }", "'string', \"required\": True, } ) else: params.append( { \"paramType\": \"query\",", "True, } ) else: params.append( { \"paramType\": \"query\", \"name\": each[0],", "OAuth2Authenticated): params.append( { \"paramType\": \"query\", \"name\": 'access_token', \"dataType\": 'string', \"required\":", "{ \"Error\": { \"id\": \"Error\", \"required\": ['error'], \"properties\": { \"error\":", "}, \"resource\": { \"type\": \"string\" } } } } for", "400, \"message\": \"form_invalid\", \"responseModel\": \"Error\" }, { 'code': 400, \"message\":", "for each in view_cls.read_safe_parameters: if isinstance(each, tuple): if each[1] ==", "}, { 'code': 401, \"message\": \"permission_denied\", \"responseModel\": \"Error\" }, {", "ret[\"models\"] = models return ret ret = SimpleCache(key_format=\"api-doc:%(api_version)s\", duration=60 *", "{ 'method': 'POST', 'parameters': global_params, 'responseMessages': responseMessages, 'nickname': 'create '", "'int', \"format\": 'int64', \"required\": True, } ) elif each[1] ==", "\"query\", \"name\": each[0], \"dataType\": 'float', \"format\": 'float', \"required\": True, }", "{ \"paramType\": \"path\", \"name\": 'pk', \"description\": 'primary key for object',", "'read ' + path, } operations.append(op) if 'delete' in view_cls.crud_method_names", "manager.name, 'description': manager.description, 'apiVersion': manager.api_version, 'swaggerVersion': \"1.2\", 'basePath': manager.base_url, 'resourcePath':", "= params operations.append(op) if 'update' in view_cls.crud_method_names and hasattr(view_cls, 'update'):", "import RestApiView, rest_api_manager from django_town.http import http_json_response from django_town.cache.utlis import", "{ \"paramType\": \"query\", \"name\": 'access_token', \"dataType\": 'string', \"required\": True, }", "= global_params.copy() for 
each_permission in view_cls.permission_classes: if issubclass(each_permission, OAuth2Authenticated): params.append(", "\"message\": \"forbidden\", \"responseModel\": \"Error\" }, { 'code': 401, \"message\": \"permission_denied\",", "params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'float', \"format\": 'float',", "} ) responseMessages = [ { 'code': 404, \"message\": \"not_found\",", "SimpleCache(key_format=\"api-doc:%(api_version)s\", duration=60 * 60 * 24, load_callback=load_cache).get(api_version=api_version) response = http_json_response(ret)", "current_api['operations'] = operations apis.append(current_api) ret['apis'] = apis ret[\"models\"] = models", "= \"GET\" response[\"Access-Control-Max-Age\"] = \"1000\" response[\"Access-Control-Allow-Headers\"] = \"*\" return response", "409, \"message\": \"method_not_allowed\", \"responseModel\": \"Error\" }, { 'code': 409, \"message\":", "{ 'code': 400, \"message\": \"form_required\", \"responseModel\": \"Error\" }, { 'code':", "\"type\": \"string\" }, \"field\": { \"type\": \"string\" }, \"message\": {", "\"name\": 'pk', \"description\": 'primary key for object', \"dataType\": 'integer', \"format\":", "\"format\": 'float', \"required\": True, } ) else: params.append( { \"paramType\":", "manager.base_url, 'info': manager.info, 'authorizations': swagger_authorizations_data()} apis = [] models =", "'apiVersion': manager.api_version, 'swaggerVersion': \"1.2\", 'basePath': manager.base_url, 'resourcePath': manager.base_url, 'info': manager.info,", "= { \"Error\": { \"id\": \"Error\", \"required\": ['error'], \"properties\": {", "} for view_cls in manager.api_list: operations = [] global_params =", "params operations.append(op) if 'update' in view_cls.crud_method_names and hasattr(view_cls, 'update'): op", "'nickname': 'create ' + path, } operations.append(create_op) if 'read' in", "operations.append(op) if 'delete' in view_cls.crud_method_names and hasattr(view_cls, 'delete'): op =", "{ 'code': 
400, \"message\": \"bad_request\", \"responseModel\": \"Error\" }, ] current_api", "}, { 'code': 400, \"message\": \"form_required\", \"responseModel\": \"Error\" }, {", "elif each[1] == float: params.append( { \"paramType\": \"query\", \"name\": each[0],", "\"\": continue if '{}' in path: path = path.replace('{}', '{pk}')", "view_cls in manager.api_list: operations = [] global_params = [] path", "operations = [] global_params = [] path = view_cls.path() if", "each[0], \"dataType\": 'float', \"format\": 'float', \"required\": True, } ) else:", "\"name\": 'access_token', \"dataType\": 'string', \"required\": True, } ) if hasattr(view_cls,", ") responseMessages = [ { 'code': 404, \"message\": \"not_found\", \"responseModel\":", "op = { 'method': 'UPDATE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses':", "issubclass(each_permission, OAuth2Authenticated): params.append( { \"paramType\": \"query\", \"name\": 'access_token', \"dataType\": 'string',", "op = { 'method': 'DELETE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses':", "\"Error\" }, { 'code': 409, \"message\": \"method_not_allowed\", \"responseModel\": \"Error\" },", "operations apis.append(current_api) ret['apis'] = apis ret[\"models\"] = models return ret", "from django_town.http import http_json_response from django_town.cache.utlis import SimpleCache from django_town.oauth2.swagger", "True, } ) if hasattr(view_cls, 'read_safe_parameters'): for each in view_cls.read_safe_parameters:", ") else: params.append( { \"paramType\": \"query\", \"name\": each, \"dataType\": 'string',", "'pk', \"description\": 'primary key for object', \"dataType\": 'integer', \"format\": 'int64',", "responseMessages = [ { 'code': 404, \"message\": \"not_found\", \"responseModel\": \"Error\"", "def read(self, request, api_version): def load_cache(api_version=\"alpha\"): manager = rest_api_manager(api_version) ret", "{ 'method': 'DELETE', 'parameters': 
global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname':", "} } } } for view_cls in manager.api_list: operations =", "' + path, } operations.append(op) current_api['operations'] = operations apis.append(current_api) ret['apis']", "'read ' + path, } operations.append(op) current_api['operations'] = operations apis.append(current_api)", "manager.api_list: operations = [] global_params = [] path = view_cls.path()", "view_cls.__doc__, } operations = [] if 'create' in view_cls.crud_method_names and", "ApiDocsView(RestApiView): def read(self, request, api_version): def load_cache(api_version=\"alpha\"): manager = rest_api_manager(api_version)", "'delete' in view_cls.crud_method_names and hasattr(view_cls, 'delete'): op = { 'method':", "'method': 'POST', 'parameters': global_params, 'responseMessages': responseMessages, 'nickname': 'create ' +", "global_params, 'responseMessages': responseMessages, 'nickname': 'create ' + path, } operations.append(create_op)", "operations = [] if 'create' in view_cls.crud_method_names and hasattr(view_cls, 'create'):", "{ \"type\": \"string\" }, \"message\": { \"type\": \"string\" }, \"resource\":", "'info': manager.info, 'authorizations': swagger_authorizations_data()} apis = [] models = {", "}, { 'code': 400, \"message\": \"form_invalid\", \"responseModel\": \"Error\" }, {", "manager = rest_api_manager(api_version) ret = {'title': manager.name, 'description': manager.description, 'apiVersion':", "\"message\": \"not_found\", \"responseModel\": \"Error\" }, { 'code': 500, \"message\": \"internal_error\",", "\"Error\" }, { 'code': 409, \"message\": \"conflict\", \"responseModel\": \"Error\" },", "'code': 409, \"message\": \"method_not_allowed\", \"responseModel\": \"Error\" }, { 'code': 409,", "{ \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'int', \"format\": 'int64', \"required\":", "view_cls.crud_method_names and hasattr(view_cls, 'update'): op = { 'method': 'UPDATE', 'parameters':", "= models 
return ret ret = SimpleCache(key_format=\"api-doc:%(api_version)s\", duration=60 * 60", "int: params.append( { \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'int', \"format\":", "True, } ) else: params.append( { \"paramType\": \"query\", \"name\": each,", "} ) elif each[1] == float: params.append( { \"paramType\": \"query\",", "\"conflict\", \"responseModel\": \"Error\" }, { 'code': 403, \"message\": \"forbidden\", \"responseModel\":", "pass pass op['parameters'] = params operations.append(op) if 'update' in view_cls.crud_method_names", "\"dataType\": 'string', \"required\": True, } ) pass pass op['parameters'] =", "True, } ) elif each[1] == float: params.append( { \"paramType\":", "{ 'path': path, 'description': view_cls.__doc__, } operations = [] if", "= operations apis.append(current_api) ret['apis'] = apis ret[\"models\"] = models return", "operations.append(create_op) if 'read' in view_cls.crud_method_names and hasattr(view_cls, 'read'): op =", "{ \"paramType\": \"query\", \"name\": each[0], \"dataType\": 'float', \"format\": 'float', \"required\":", "401, \"message\": \"unauthorized\", \"responseModel\": \"Error\" }, { 'code': 400, \"message\":", "in view_cls.crud_method_names and hasattr(view_cls, 'read'): op = { 'method': 'GET',", "hasattr(view_cls, 'update'): op = { 'method': 'UPDATE', 'parameters': global_params, 'responseMessages':", "from django_town.oauth2.swagger import swagger_authorizations_data from django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from", "= { 'method': 'UPDATE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [],", "\"Error\" }, { 'code': 401, \"message\": \"permission_denied\", \"responseModel\": \"Error\" },", "else: params.append( { \"paramType\": \"query\", \"name\": each, \"dataType\": 'string', \"required\":", "\"required\": True, } ) pass pass op['parameters'] = params operations.append(op)", "import swagger_authorizations_data 
from django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from django_town.social.permissions import", "'UPDATE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname': 'read '", "= { 'path': path, 'description': view_cls.__doc__, } operations = []", "view_cls.crud_method_names and hasattr(view_cls, 'read'): op = { 'method': 'GET', 'responseMessages':", "} ) else: params.append( { \"paramType\": \"query\", \"name\": each, \"dataType\":", "hasattr(view_cls, 'delete'): op = { 'method': 'DELETE', 'parameters': global_params, 'responseMessages':", "\"paramType\": \"query\", \"name\": each[0], \"dataType\": 'int', \"format\": 'int64', \"required\": True,", "\"name\": each[0], \"dataType\": 'string', \"required\": True, } ) else: params.append(", "500, \"message\": \"internal_error\", \"responseModel\": \"Error\" }, { 'code': 409, \"message\":", "\"dataType\": 'string', \"required\": True, } ) else: params.append( { \"paramType\":", "\"query\", \"name\": 'access_token', \"dataType\": 'string', \"required\": True, } ) if", "\"forbidden\", \"responseModel\": \"Error\" }, { 'code': 401, \"message\": \"permission_denied\", \"responseModel\":", "= \"*\" response[\"Access-Control-Allow-Methods\"] = \"GET\" response[\"Access-Control-Max-Age\"] = \"1000\" response[\"Access-Control-Allow-Headers\"] =", "403, \"message\": \"forbidden\", \"responseModel\": \"Error\" }, { 'code': 401, \"message\":", "params = global_params.copy() for each_permission in view_cls.permission_classes: if issubclass(each_permission, OAuth2Authenticated):", "+ path, } operations.append(create_op) if 'read' in view_cls.crud_method_names and hasattr(view_cls,", "models = { \"Error\": { \"id\": \"Error\", \"required\": ['error'], \"properties\":", "\"id\": \"Error\", \"required\": ['error'], \"properties\": { \"error\": { \"type\": \"string\"", "\"unauthorized\", \"responseModel\": \"Error\" }, { 'code': 400, 
\"message\": \"form_invalid\", \"responseModel\":", "' + path, } operations.append(create_op) if 'read' in view_cls.crud_method_names and", "} operations.append(op) current_api['operations'] = operations apis.append(current_api) ret['apis'] = apis ret[\"models\"]", "'code': 400, \"message\": \"form_invalid\", \"responseModel\": \"Error\" }, { 'code': 400,", "'int64', \"required\": True, } ) elif each[1] == float: params.append(", "'access_token', \"dataType\": 'string', \"required\": True, } ) if hasattr(view_cls, 'read_safe_parameters'):", ") pass pass op['parameters'] = params operations.append(op) if 'update' in", "\"bad_request\", \"responseModel\": \"Error\" }, ] current_api = { 'path': path,", "True, } ) pass pass op['parameters'] = params operations.append(op) if", "\"dataType\": 'string', \"required\": True, } ) if hasattr(view_cls, 'read_safe_parameters'): for", "= apis ret[\"models\"] = models return ret ret = SimpleCache(key_format=\"api-doc:%(api_version)s\",", "view_cls.path() if path == \"\": continue if '{}' in path:", "if path == \"\": continue if '{}' in path: path", "\"form_required\", \"responseModel\": \"Error\" }, { 'code': 400, \"message\": \"bad_request\", \"responseModel\":", "global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname': 'read ' + path,", "}, { 'code': 400, \"message\": \"bad_request\", \"responseModel\": \"Error\" }, ]", "\"name\": each[0], \"dataType\": 'int', \"format\": 'int64', \"required\": True, } )", "\"query\", \"name\": each[0], \"dataType\": 'string', \"required\": True, } ) else:", "\"message\": \"internal_error\", \"responseModel\": \"Error\" }, { 'code': 409, \"message\": \"method_not_allowed\",", "} ) pass pass op['parameters'] = params operations.append(op) if 'update'", ") if hasattr(view_cls, 'read_safe_parameters'): for each in view_cls.read_safe_parameters: if isinstance(each,", "manager.api_version, 'swaggerVersion': \"1.2\", 'basePath': manager.base_url, 'resourcePath': 
manager.base_url, 'info': manager.info, 'authorizations':", "and hasattr(view_cls, 'update'): op = { 'method': 'UPDATE', 'parameters': global_params,", "[] if 'create' in view_cls.crud_method_names and hasattr(view_cls, 'create'): create_op =", "[] path = view_cls.path() if path == \"\": continue if", "}, \"field\": { \"type\": \"string\" }, \"message\": { \"type\": \"string\"", "\"string\" }, \"message\": { \"type\": \"string\" }, \"resource\": { \"type\":", "api_version): def load_cache(api_version=\"alpha\"): manager = rest_api_manager(api_version) ret = {'title': manager.name,", "{ 'code': 403, \"message\": \"forbidden\", \"responseModel\": \"Error\" }, { 'code':" ]
[ "test_dddo003_value_no_options(dash_dcc): app = Dash(__name__) app.layout = html.Div( [ dcc.Dropdown(value=\"foobar\", id=\"dropdown\"),", "[Input(\"my-dynamic-dropdown\", \"search_value\")], ) def update_options(search_value): if not search_value: raise PreventUpdate", "Dash(__name__) app.layout = html.Div( [ dcc.Dropdown(value=\"foobar\", id=\"dropdown\"), ] ) dash_dcc.start_server(app)", "\"San Francisco, CA\\n \") assert dash_dcc.get_logs() == [] def test_dddo003_value_no_options(dash_dcc):", "input\") # Focus on the input to open the options", "empty message. dash_dcc.wait_for_text_to_equal(\".Select-noresults\", \"No results found\") input_.clear() input_.send_keys(\"o\") options =", "\"value\": \"SF\"}, ] app = Dash(__name__) app.layout = dcc.Dropdown(id=\"my-dynamic-dropdown\", options=[])", "1 print(options) assert options[0].text == \"Montreal\" assert dash_dcc.get_logs() == []", "dash import Dash, Input, Output, dcc, html from dash.exceptions import", "multi=True, ) app.layout = html.Div(dropdown) dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\", \"San Francisco, CA\\n", "`x` in them, should show the empty message. 
dash_dcc.wait_for_text_to_equal(\".Select-noresults\", \"No", "== [] def test_dddo003_value_no_options(dash_dcc): app = Dash(__name__) app.layout = html.Div(", "def test_dddo002_array_comma_value(dash_dcc): app = Dash(__name__) dropdown = dcc.Dropdown( options=[\"New York,", "dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\", \"San Francisco, CA\\n \") assert dash_dcc.get_logs() == [] def", "\"No results found\") input_.clear() input_.send_keys(\"o\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") #", "QC\", \"San Francisco, CA\"], value=[\"San Francisco, CA\"], multi=True, ) app.layout", "app = Dash(__name__) app.layout = html.Div( [ dcc.Dropdown(value=\"foobar\", id=\"dropdown\"), ]", "def test_dddo003_value_no_options(dash_dcc): app = Dash(__name__) app.layout = html.Div( [ dcc.Dropdown(value=\"foobar\",", "PreventUpdate return [o for o in dropdown_options if search_value in", "len(options) == 3 # Searching for `on` input_.send_keys(\"n\") options =", "# Searching for `on` input_.send_keys(\"n\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") assert", "import PreventUpdate def test_dddo001_dynamic_options(dash_dcc): dropdown_options = [ {\"label\": \"New York", "if not search_value: raise PreventUpdate return [o for o in", "== [] def test_dddo002_array_comma_value(dash_dcc): app = Dash(__name__) dropdown = dcc.Dropdown(", "print(options) assert options[0].text == \"Montreal\" assert dash_dcc.get_logs() == [] def", "o in dropdown_options if search_value in o[\"label\"]] dash_dcc.start_server(app) # Get", "assert dash_dcc.get_logs() == [] def test_dddo003_value_no_options(dash_dcc): app = Dash(__name__) app.layout", "\"value\": \"NYC\"}, {\"label\": \"Montreal\", \"value\": \"MTL\"}, {\"label\": \"San Francisco\", \"value\":", "Output(\"my-dynamic-dropdown\", \"options\"), [Input(\"my-dynamic-dropdown\", \"search_value\")], ) def update_options(search_value): if not 
search_value:", "input to open the options menu input_.send_keys(\"x\") # No options", "Focus on the input to open the options menu input_.send_keys(\"x\")", "results found\") input_.clear() input_.send_keys(\"o\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") # Should", "input_.clear() input_.send_keys(\"o\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") # Should show all", "= html.Div( [ dcc.Dropdown(value=\"foobar\", id=\"dropdown\"), ] ) dash_dcc.start_server(app) assert dash_dcc.get_logs()", "def test_dddo001_dynamic_options(dash_dcc): dropdown_options = [ {\"label\": \"New York City\", \"value\":", ".VirtualizedSelectOption\") # Should show all options. assert len(options) == 3", "\"San Francisco, CA\"], value=[\"San Francisco, CA\"], multi=True, ) app.layout =", "\"MTL\"}, {\"label\": \"San Francisco\", \"value\": \"SF\"}, ] app = Dash(__name__)", "search_value: raise PreventUpdate return [o for o in dropdown_options if", "search value. input_ = dash_dcc.find_element(\"#my-dynamic-dropdown input\") # Focus on the", "dash_dcc.find_element(\"#my-dynamic-dropdown input\") # Focus on the input to open the", "Should show all options. 
assert len(options) == 3 # Searching", "if search_value in o[\"label\"]] dash_dcc.start_server(app) # Get the inner input", "\"SF\"}, ] app = Dash(__name__) app.layout = dcc.Dropdown(id=\"my-dynamic-dropdown\", options=[]) @app.callback(", "\"New York City\", \"value\": \"NYC\"}, {\"label\": \"Montreal\", \"value\": \"MTL\"}, {\"label\":", "Francisco, CA\"], multi=True, ) app.layout = html.Div(dropdown) dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\", \"San", "= dash_dcc.find_element(\"#my-dynamic-dropdown input\") # Focus on the input to open", "found with `x` in them, should show the empty message.", "options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") assert len(options) == 1 print(options) assert", "= dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") # Should show all options. assert len(options)", "Get the inner input used for search value. input_ =", "to be found with `x` in them, should show the", "\"Montreal\" assert dash_dcc.get_logs() == [] def test_dddo002_array_comma_value(dash_dcc): app = Dash(__name__)", "{\"label\": \"New York City\", \"value\": \"NYC\"}, {\"label\": \"Montreal\", \"value\": \"MTL\"},", "import Dash, Input, Output, dcc, html from dash.exceptions import PreventUpdate", "in them, should show the empty message. dash_dcc.wait_for_text_to_equal(\".Select-noresults\", \"No results", "update_options(search_value): if not search_value: raise PreventUpdate return [o for o", "should show the empty message. dash_dcc.wait_for_text_to_equal(\".Select-noresults\", \"No results found\") input_.clear()", "with `x` in them, should show the empty message. dash_dcc.wait_for_text_to_equal(\".Select-noresults\",", "found\") input_.clear() input_.send_keys(\"o\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") # Should show", "for search value. 
input_ = dash_dcc.find_element(\"#my-dynamic-dropdown input\") # Focus on", "return [o for o in dropdown_options if search_value in o[\"label\"]]", "# Focus on the input to open the options menu", "dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\", \"San Francisco, CA\\n \") assert dash_dcc.get_logs() == []", "NY\", \"Montreal, QC\", \"San Francisco, CA\"], value=[\"San Francisco, CA\"], multi=True,", "dash.exceptions import PreventUpdate def test_dddo001_dynamic_options(dash_dcc): dropdown_options = [ {\"label\": \"New", "raise PreventUpdate return [o for o in dropdown_options if search_value", "options menu input_.send_keys(\"x\") # No options to be found with", "input_.send_keys(\"x\") # No options to be found with `x` in", "assert len(options) == 1 print(options) assert options[0].text == \"Montreal\" assert", "dropdown = dcc.Dropdown( options=[\"New York, NY\", \"Montreal, QC\", \"San Francisco,", "dash_dcc.get_logs() == [] def test_dddo003_value_no_options(dash_dcc): app = Dash(__name__) app.layout =", "html from dash.exceptions import PreventUpdate def test_dddo001_dynamic_options(dash_dcc): dropdown_options = [", "] app = Dash(__name__) app.layout = dcc.Dropdown(id=\"my-dynamic-dropdown\", options=[]) @app.callback( Output(\"my-dynamic-dropdown\",", "Dash(__name__) app.layout = dcc.Dropdown(id=\"my-dynamic-dropdown\", options=[]) @app.callback( Output(\"my-dynamic-dropdown\", \"options\"), [Input(\"my-dynamic-dropdown\", \"search_value\")],", "dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") # Should show all options. 
assert len(options) ==", "= Dash(__name__) app.layout = html.Div( [ dcc.Dropdown(value=\"foobar\", id=\"dropdown\"), ] )", "dropdown_options if search_value in o[\"label\"]] dash_dcc.start_server(app) # Get the inner", "Dash, Input, Output, dcc, html from dash.exceptions import PreventUpdate def", "\"search_value\")], ) def update_options(search_value): if not search_value: raise PreventUpdate return", "`on` input_.send_keys(\"n\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") assert len(options) == 1", "dcc, html from dash.exceptions import PreventUpdate def test_dddo001_dynamic_options(dash_dcc): dropdown_options =", "for `on` input_.send_keys(\"n\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") assert len(options) ==", "Francisco\", \"value\": \"SF\"}, ] app = Dash(__name__) app.layout = dcc.Dropdown(id=\"my-dynamic-dropdown\",", "City\", \"value\": \"NYC\"}, {\"label\": \"Montreal\", \"value\": \"MTL\"}, {\"label\": \"San Francisco\",", "# Get the inner input used for search value. 
input_", "from dash.exceptions import PreventUpdate def test_dddo001_dynamic_options(dash_dcc): dropdown_options = [ {\"label\":", "options=[\"New York, NY\", \"Montreal, QC\", \"San Francisco, CA\"], value=[\"San Francisco,", "value=[\"San Francisco, CA\"], multi=True, ) app.layout = html.Div(dropdown) dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\",", "[ dcc.Dropdown(value=\"foobar\", id=\"dropdown\"), ] ) dash_dcc.start_server(app) assert dash_dcc.get_logs() == []", "html.Div( [ dcc.Dropdown(value=\"foobar\", id=\"dropdown\"), ] ) dash_dcc.start_server(app) assert dash_dcc.get_logs() ==", "menu input_.send_keys(\"x\") # No options to be found with `x`", "= Dash(__name__) dropdown = dcc.Dropdown( options=[\"New York, NY\", \"Montreal, QC\",", "app = Dash(__name__) dropdown = dcc.Dropdown( options=[\"New York, NY\", \"Montreal,", "to open the options menu input_.send_keys(\"x\") # No options to", "the empty message. dash_dcc.wait_for_text_to_equal(\".Select-noresults\", \"No results found\") input_.clear() input_.send_keys(\"o\") options", "{\"label\": \"San Francisco\", \"value\": \"SF\"}, ] app = Dash(__name__) app.layout", "Searching for `on` input_.send_keys(\"n\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") assert len(options)", "assert dash_dcc.get_logs() == [] def test_dddo002_array_comma_value(dash_dcc): app = Dash(__name__) dropdown", "open the options menu input_.send_keys(\"x\") # No options to be", "dash_dcc.start_server(app) # Get the inner input used for search value.", "dcc.Dropdown(value=\"foobar\", id=\"dropdown\"), ] ) dash_dcc.start_server(app) assert dash_dcc.get_logs() == [] dash_dcc.wait_for_element(\"#dropdown\")", "show the empty message. dash_dcc.wait_for_text_to_equal(\".Select-noresults\", \"No results found\") input_.clear() input_.send_keys(\"o\")", "York City\", \"value\": \"NYC\"}, {\"label\": \"Montreal\", \"value\": \"MTL\"}, {\"label\": \"San", "value. 
input_ = dash_dcc.find_element(\"#my-dynamic-dropdown input\") # Focus on the input", "not search_value: raise PreventUpdate return [o for o in dropdown_options", "app.layout = html.Div(dropdown) dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\", \"San Francisco, CA\\n \") assert", "\"Montreal\", \"value\": \"MTL\"}, {\"label\": \"San Francisco\", \"value\": \"SF\"}, ] app", "Dash(__name__) dropdown = dcc.Dropdown( options=[\"New York, NY\", \"Montreal, QC\", \"San", "Input, Output, dcc, html from dash.exceptions import PreventUpdate def test_dddo001_dynamic_options(dash_dcc):", "Francisco, CA\"], value=[\"San Francisco, CA\"], multi=True, ) app.layout = html.Div(dropdown)", ") def update_options(search_value): if not search_value: raise PreventUpdate return [o", "message. dash_dcc.wait_for_text_to_equal(\".Select-noresults\", \"No results found\") input_.clear() input_.send_keys(\"o\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown", "all options. assert len(options) == 3 # Searching for `on`", "assert len(options) == 3 # Searching for `on` input_.send_keys(\"n\") options", "Output, dcc, html from dash.exceptions import PreventUpdate def test_dddo001_dynamic_options(dash_dcc): dropdown_options", "app = Dash(__name__) app.layout = dcc.Dropdown(id=\"my-dynamic-dropdown\", options=[]) @app.callback( Output(\"my-dynamic-dropdown\", \"options\"),", "[] def test_dddo002_array_comma_value(dash_dcc): app = Dash(__name__) dropdown = dcc.Dropdown( options=[\"New", "options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") # Should show all options. 
assert", ".VirtualizedSelectOption\") assert len(options) == 1 print(options) assert options[0].text == \"Montreal\"", "def update_options(search_value): if not search_value: raise PreventUpdate return [o for", "html.Div(dropdown) dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\", \"San Francisco, CA\\n \") assert dash_dcc.get_logs() ==", "be found with `x` in them, should show the empty", "[o for o in dropdown_options if search_value in o[\"label\"]] dash_dcc.start_server(app)", "dcc.Dropdown( options=[\"New York, NY\", \"Montreal, QC\", \"San Francisco, CA\"], value=[\"San", "input_.send_keys(\"n\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") assert len(options) == 1 print(options)", "== \"Montreal\" assert dash_dcc.get_logs() == [] def test_dddo002_array_comma_value(dash_dcc): app =", "used for search value. input_ = dash_dcc.find_element(\"#my-dynamic-dropdown input\") # Focus", "input used for search value. input_ = dash_dcc.find_element(\"#my-dynamic-dropdown input\") #", "input_ = dash_dcc.find_element(\"#my-dynamic-dropdown input\") # Focus on the input to", "o[\"label\"]] dash_dcc.start_server(app) # Get the inner input used for search", "\"San Francisco\", \"value\": \"SF\"}, ] app = Dash(__name__) app.layout =", "the options menu input_.send_keys(\"x\") # No options to be found", "= Dash(__name__) app.layout = dcc.Dropdown(id=\"my-dynamic-dropdown\", options=[]) @app.callback( Output(\"my-dynamic-dropdown\", \"options\"), [Input(\"my-dynamic-dropdown\",", "# Should show all options. assert len(options) == 3 #", "them, should show the empty message. 
dash_dcc.wait_for_text_to_equal(\".Select-noresults\", \"No results found\")", "3 # Searching for `on` input_.send_keys(\"n\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\")", "CA\"], value=[\"San Francisco, CA\"], multi=True, ) app.layout = html.Div(dropdown) dash_dcc.start_server(app)", "= [ {\"label\": \"New York City\", \"value\": \"NYC\"}, {\"label\": \"Montreal\",", "= dcc.Dropdown(id=\"my-dynamic-dropdown\", options=[]) @app.callback( Output(\"my-dynamic-dropdown\", \"options\"), [Input(\"my-dynamic-dropdown\", \"search_value\")], ) def", "in o[\"label\"]] dash_dcc.start_server(app) # Get the inner input used for", "inner input used for search value. input_ = dash_dcc.find_element(\"#my-dynamic-dropdown input\")", "CA\\n \") assert dash_dcc.get_logs() == [] def test_dddo003_value_no_options(dash_dcc): app =", "York, NY\", \"Montreal, QC\", \"San Francisco, CA\"], value=[\"San Francisco, CA\"],", "app.layout = html.Div( [ dcc.Dropdown(value=\"foobar\", id=\"dropdown\"), ] ) dash_dcc.start_server(app) assert", "= dcc.Dropdown( options=[\"New York, NY\", \"Montreal, QC\", \"San Francisco, CA\"],", "the inner input used for search value. input_ = dash_dcc.find_element(\"#my-dynamic-dropdown", "options. 
assert len(options) == 3 # Searching for `on` input_.send_keys(\"n\")", "test_dddo001_dynamic_options(dash_dcc): dropdown_options = [ {\"label\": \"New York City\", \"value\": \"NYC\"},", "test_dddo002_array_comma_value(dash_dcc): app = Dash(__name__) dropdown = dcc.Dropdown( options=[\"New York, NY\",", "dash_dcc.wait_for_text_to_equal(\".Select-noresults\", \"No results found\") input_.clear() input_.send_keys(\"o\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\")", "app.layout = dcc.Dropdown(id=\"my-dynamic-dropdown\", options=[]) @app.callback( Output(\"my-dynamic-dropdown\", \"options\"), [Input(\"my-dynamic-dropdown\", \"search_value\")], )", "@app.callback( Output(\"my-dynamic-dropdown\", \"options\"), [Input(\"my-dynamic-dropdown\", \"search_value\")], ) def update_options(search_value): if not", "PreventUpdate def test_dddo001_dynamic_options(dash_dcc): dropdown_options = [ {\"label\": \"New York City\",", "# No options to be found with `x` in them,", "dcc.Dropdown(id=\"my-dynamic-dropdown\", options=[]) @app.callback( Output(\"my-dynamic-dropdown\", \"options\"), [Input(\"my-dynamic-dropdown\", \"search_value\")], ) def update_options(search_value):", "search_value in o[\"label\"]] dash_dcc.start_server(app) # Get the inner input used", "== 1 print(options) assert options[0].text == \"Montreal\" assert dash_dcc.get_logs() ==", "dropdown_options = [ {\"label\": \"New York City\", \"value\": \"NYC\"}, {\"label\":", "\") assert dash_dcc.get_logs() == [] def test_dddo003_value_no_options(dash_dcc): app = Dash(__name__)", "dash_dcc.get_logs() == [] def test_dddo002_array_comma_value(dash_dcc): app = Dash(__name__) dropdown =", "Francisco, CA\\n \") assert dash_dcc.get_logs() == [] def test_dddo003_value_no_options(dash_dcc): app", "[ {\"label\": \"New York City\", \"value\": \"NYC\"}, {\"label\": \"Montreal\", \"value\":", "len(options) == 1 print(options) assert options[0].text == \"Montreal\" assert dash_dcc.get_logs()", 
"options=[]) @app.callback( Output(\"my-dynamic-dropdown\", \"options\"), [Input(\"my-dynamic-dropdown\", \"search_value\")], ) def update_options(search_value): if", "show all options. assert len(options) == 3 # Searching for", "for o in dropdown_options if search_value in o[\"label\"]] dash_dcc.start_server(app) #", "input_.send_keys(\"o\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") # Should show all options.", ") app.layout = html.Div(dropdown) dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\", \"San Francisco, CA\\n \")", "\"Montreal, QC\", \"San Francisco, CA\"], value=[\"San Francisco, CA\"], multi=True, )", "= dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") assert len(options) == 1 print(options) assert options[0].text", "options[0].text == \"Montreal\" assert dash_dcc.get_logs() == [] def test_dddo002_array_comma_value(dash_dcc): app", "\"options\"), [Input(\"my-dynamic-dropdown\", \"search_value\")], ) def update_options(search_value): if not search_value: raise", "= html.Div(dropdown) dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\", \"San Francisco, CA\\n \") assert dash_dcc.get_logs()", "== 3 # Searching for `on` input_.send_keys(\"n\") options = dash_dcc.find_elements(\"#my-dynamic-dropdown", "\"value\": \"MTL\"}, {\"label\": \"San Francisco\", \"value\": \"SF\"}, ] app =", "{\"label\": \"Montreal\", \"value\": \"MTL\"}, {\"label\": \"San Francisco\", \"value\": \"SF\"}, ]", "[] def test_dddo003_value_no_options(dash_dcc): app = Dash(__name__) app.layout = html.Div( [", "options to be found with `x` in them, should show", "CA\"], multi=True, ) app.layout = html.Div(dropdown) dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal(\"#react-select-2--value-0\", \"San Francisco,", "from dash import Dash, Input, Output, dcc, html from dash.exceptions", "the input to open the options menu input_.send_keys(\"x\") 
# No", "on the input to open the options menu input_.send_keys(\"x\") #", "No options to be found with `x` in them, should", "assert options[0].text == \"Montreal\" assert dash_dcc.get_logs() == [] def test_dddo002_array_comma_value(dash_dcc):", "dash_dcc.find_elements(\"#my-dynamic-dropdown .VirtualizedSelectOption\") assert len(options) == 1 print(options) assert options[0].text ==", "\"NYC\"}, {\"label\": \"Montreal\", \"value\": \"MTL\"}, {\"label\": \"San Francisco\", \"value\": \"SF\"},", "in dropdown_options if search_value in o[\"label\"]] dash_dcc.start_server(app) # Get the" ]
[ "result, frame = cv2.imencode('.jpg', frame, encode_param) # data = zlib.compress(pickle.dumps(frame,", "cam = cv2.VideoCapture(\"E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4\") cam.set(3, 320) cam.set(4, 240) img_counter", "client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) client_socket.connect(('127.0.0.1', 8485)) connection = client_socket.makefile('wb') cam", "= zlib.compress(pickle.dumps(frame, 0)) data = pickle.dumps(frame, 0) size = len(data)", "size = len(data) print(\"{}: {}\".format(img_counter, size)) client_socket.sendall(struct.pack(\">L\", size) + data)", "frame, encode_param) # data = zlib.compress(pickle.dumps(frame, 0)) data = pickle.dumps(frame,", "frame = cv2.imencode('.jpg', frame, encode_param) # data = zlib.compress(pickle.dumps(frame, 0))", "socket import struct import time import pickle import zlib client_socket", "import cv2 import io import socket import struct import time", "= client_socket.makefile('wb') cam = cv2.VideoCapture(\"E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4\") cam.set(3, 320) cam.set(4,", "client_socket.connect(('127.0.0.1', 8485)) connection = client_socket.makefile('wb') cam = cv2.VideoCapture(\"E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4\")", "cv2 import io import socket import struct import time import", "data = pickle.dumps(frame, 0) size = len(data) print(\"{}: {}\".format(img_counter, size))", "# data = zlib.compress(pickle.dumps(frame, 0)) data = pickle.dumps(frame, 0) size", "encode_param) # data = zlib.compress(pickle.dumps(frame, 0)) data = pickle.dumps(frame, 0)", "0) size = len(data) print(\"{}: {}\".format(img_counter, size)) client_socket.sendall(struct.pack(\">L\", size) +", "print(\"{}: {}\".format(img_counter, size)) client_socket.sendall(struct.pack(\">L\", size) + data) img_counter += 1", "<NAME>(GabbarWorld.com) 1080p.mp4\") cam.set(3, 320) cam.set(4, 240) img_counter = 0 encode_param", "= [int(cv2.IMWRITE_JPEG_QUALITY), 90] while True: ret, frame = 
cam.read() result,", "cam.read() result, frame = cv2.imencode('.jpg', frame, encode_param) # data =", "img_counter = 0 encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90] while True: ret,", "1080p.mp4\") cam.set(3, 320) cam.set(4, 240) img_counter = 0 encode_param =", "<filename>Server.py<gh_stars>1-10 import cv2 import io import socket import struct import", "ret, frame = cam.read() result, frame = cv2.imencode('.jpg', frame, encode_param)", "320) cam.set(4, 240) img_counter = 0 encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]", "90] while True: ret, frame = cam.read() result, frame =", "io import socket import struct import time import pickle import", "= cv2.VideoCapture(\"E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4\") cam.set(3, 320) cam.set(4, 240) img_counter =", "pickle.dumps(frame, 0) size = len(data) print(\"{}: {}\".format(img_counter, size)) client_socket.sendall(struct.pack(\">L\", size)", "import zlib client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) client_socket.connect(('127.0.0.1', 8485)) connection =", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) client_socket.connect(('127.0.0.1', 8485)) connection = client_socket.makefile('wb') cam = cv2.VideoCapture(\"E:/songs/Attention", "connection = client_socket.makefile('wb') cam = cv2.VideoCapture(\"E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4\") cam.set(3, 320)", "= cam.read() result, frame = cv2.imencode('.jpg', frame, encode_param) # data", "= 0 encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90] while True: ret, frame", "import struct import time import pickle import zlib client_socket =", "pickle import zlib client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) client_socket.connect(('127.0.0.1', 8485)) connection", "True: ret, frame = cam.read() result, frame = cv2.imencode('.jpg', frame,", "{}\".format(img_counter, size)) client_socket.sendall(struct.pack(\">L\", size) + data) img_counter += 1 cam.release()", "import socket import struct import time 
import pickle import zlib", "struct import time import pickle import zlib client_socket = socket.socket(socket.AF_INET,", "while True: ret, frame = cam.read() result, frame = cv2.imencode('.jpg',", "= cv2.imencode('.jpg', frame, encode_param) # data = zlib.compress(pickle.dumps(frame, 0)) data", "data = zlib.compress(pickle.dumps(frame, 0)) data = pickle.dumps(frame, 0) size =", "import time import pickle import zlib client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "socket.SOCK_STREAM) client_socket.connect(('127.0.0.1', 8485)) connection = client_socket.makefile('wb') cam = cv2.VideoCapture(\"E:/songs/Attention <NAME>(GabbarWorld.com)", "cam.set(4, 240) img_counter = 0 encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90] while", "8485)) connection = client_socket.makefile('wb') cam = cv2.VideoCapture(\"E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4\") cam.set(3,", "client_socket.makefile('wb') cam = cv2.VideoCapture(\"E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4\") cam.set(3, 320) cam.set(4, 240)", "cv2.imencode('.jpg', frame, encode_param) # data = zlib.compress(pickle.dumps(frame, 0)) data =", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) client_socket.connect(('127.0.0.1', 8485)) connection = client_socket.makefile('wb') cam =", "0 encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90] while True: ret, frame =", "cam.set(3, 320) cam.set(4, 240) img_counter = 0 encode_param = [int(cv2.IMWRITE_JPEG_QUALITY),", "0)) data = pickle.dumps(frame, 0) size = len(data) print(\"{}: {}\".format(img_counter,", "import io import socket import struct import time import pickle", "import pickle import zlib client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) client_socket.connect(('127.0.0.1', 8485))", "frame = cam.read() result, frame = cv2.imencode('.jpg', frame, encode_param) #", "zlib.compress(pickle.dumps(frame, 0)) data = pickle.dumps(frame, 0) size = len(data) print(\"{}:", "[int(cv2.IMWRITE_JPEG_QUALITY), 90] while True: 
ret, frame = cam.read() result, frame", "cv2.VideoCapture(\"E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4\") cam.set(3, 320) cam.set(4, 240) img_counter = 0", "len(data) print(\"{}: {}\".format(img_counter, size)) client_socket.sendall(struct.pack(\">L\", size) + data) img_counter +=", "time import pickle import zlib client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) client_socket.connect(('127.0.0.1',", "zlib client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) client_socket.connect(('127.0.0.1', 8485)) connection = client_socket.makefile('wb')", "240) img_counter = 0 encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90] while True:", "= pickle.dumps(frame, 0) size = len(data) print(\"{}: {}\".format(img_counter, size)) client_socket.sendall(struct.pack(\">L\",", "encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90] while True: ret, frame = cam.read()", "= len(data) print(\"{}: {}\".format(img_counter, size)) client_socket.sendall(struct.pack(\">L\", size) + data) img_counter" ]
[ "average Returns: Operation that does the update \"\"\" updates =", "return tf.keras.Sequential(layers) def stack_dense_layer(layer_cfg): \"\"\"Stack Dense layers. Args: layer_cfg: list", "integers specifying how many channels are at each hidden layer", "polyak_rate=1.0): \"\"\"Update the target variables using exponential moving average. Specifically,", "[1, 1, variable_length, 1])) # b shape: [B, ?, ?,", "2.0 (the \"License\"); # you may not use this file", "c in n_layer_channel], 1) def stack_conv_layer(layer_cfg, padding='same'): \"\"\"Stack convolution layers", "__future__ import division import tensorflow as tf def film_params(sentence_embedding, n_layer_channel):", "== (batch_size, 64, embedding_dim) # hidden shape == (batch_size, hidden_size)", "GRE encoder. Attributes: embedding: word embedding matrix gru: the GRU", "are at each hidden layer to be FiLM'ed Returns: a", "variables.\"\"\" # For not trainable variables do hard updates. return", "the concatenated vector to the GRU output, state = self.gru(x)", "updates def vector_tensor_product(a, b): \"\"\"\"Returns keras layer that perfrom a", "inputs: tf.tile(inputs[0], multiples=inputs[1])) a = tile_layer((a, [1, 1, variable_length, 1]))", "each tuple should be (channel, kernel size, strides) padding: what", "units: number of units of the memory state vocab_size: total", "x = self.embedding(x) # passing the concatenated vector to the", "https://www.tensorflow.org/tutorials/text/image_captioning \"\"\" # Lint as: python3 # pylint: disable=invalid-name from", "(batch_size, 1, hidden_size) hidden_with_time_axis = tf.expand_dims(hidden, 1) # score shape", "padding the conv layers use Returns: the keras model with", "2.0. 
Partially adapted from: https://www.tensorflow.org/tutorials/text/image_captioning \"\"\" # Lint as: python3", "score to self.V attention_weights = tf.nn.softmax(self.V(score), axis=1) # context_vector shape", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "cfg in layer_cfg[:-1]: layers.append( tf.keras.layers.Conv2D( filters=cfg[0], kernel_size=cfg[1], strides=cfg[2], activation=tf.nn.relu, padding=padding))", "self.V = tf.keras.layers.Dense(1) def call(self, features, hidden): # features(CNN_encoder output)", "shape = shape_layer(b) shape_numpy = b.get_shape() variable_length = shape[1] #", "1 at the last axis because you are applying score", "= tf.nn.softmax(self.V(score), axis=1) # context_vector shape after sum == (batch_size,", "use Returns: the keras model with stacked conv layers \"\"\"", "= tf.reduce_sum(context_vector, axis=1) return context_vector, attention_weights class GRUEnecoder(tf.keras.Model): \"\"\"TF2.0 GRE", "concatenated vector to the GRU output, state = self.gru(x) return", "[B, ?, d], b shape: [B, ?, d] shape_layer =", "vocab_size): \"\"\"Initialize the GRU encoder. Args: embedding_dim: dimension of word", "for cfg in layer_cfg[:-1]: layers.append( tf.keras.layers.Conv2D( filters=cfg[0], kernel_size=cfg[1], strides=cfg[2], activation=tf.nn.relu,", "in layer_cfg[:-1]: layers.append( tf.keras.layers.Conv2D( filters=cfg[0], kernel_size=cfg[1], strides=cfg[2], activation=tf.nn.relu, padding=padding)) final_cfg", "dense layers \"\"\" super(BahdanauAttention, self).__init__() self.W1 = tf.keras.layers.Dense(units) self.W2 =", "in n_layer_channel], 1) def stack_conv_layer(layer_cfg, padding='same'): \"\"\"Stack convolution layers per", "1) # score shape == (batch_size, 64, hidden_size) score =", "the GRU output, state = self.gru(x) return output, state def", "layer Returns: the keras model with stacked dense layers \"\"\"", "Copyright 2022 The Google Research Authors. 
# # Licensed under", "\"\"\"\"Returns keras layer that perfrom a outer product between a", "\"\"\"Stack Dense layers. Args: layer_cfg: list of integer specifying the", "a = expand_dims_layer_1(a) # a shape: [B, ?, 1, d]", "should be (channel, kernel size, strides) padding: what kind of", "shape_layer = tf.keras.layers.Lambda(tf.shape) shape = shape_layer(b) shape_numpy = b.get_shape() variable_length", "# passing the concatenated vector to the GRU output, state", "w1: weights that process the feature w2: weights that process", "# context_vector shape after sum == (batch_size, hidden_size) context_vector =", "number of units at each layer Returns: the keras model", "of tensors the same length as n_layer_channel. Each element contains", "layers.append( tf.keras.layers.Conv2D( filters=cfg[0], kernel_size=cfg[1], strides=cfg[2], activation=tf.nn.relu, padding=padding)) final_cfg = layer_cfg[-1]", "?, ?, d] return tf.keras.layers.concatenate([a, b]) # shape: [B, ?,", "use this file except in compliance with the License. #", "import tensorflow as tf def film_params(sentence_embedding, n_layer_channel): \"\"\"Generate FiLM parameters", "because you are applying score to self.V attention_weights = tf.nn.softmax(self.V(score),", "layers = [] for cfg in layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1]))", "def stack_conv_layer(layer_cfg, padding='same'): \"\"\"Stack convolution layers per layer_cfg. Args: layer_cfg:", "# hidden shape == (batch_size, hidden_size) # hidden_with_time_axis shape ==", "dimension exists. 
Args: sentence_embedding: a tensor containing batched sentenced embedding", "observations polyak_rate: rate of moving average Returns: Operation that does", "embedding matrix gru: the GRU layer \"\"\" def __init__(self, embedding_dim,", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "(batch_size, 64, 1) # you get 1 at the last", "and # limitations under the License. \"\"\"Utilities for Tensorflow 2.0.", "1, variable_length, 1])) # a shape: [B, ?, ?, d]", "License. # You may obtain a copy of the License", "absolute_import from __future__ import division import tensorflow as tf def", "activation=tf.nn.relu, padding=padding)) final_cfg = layer_cfg[-1] layers.append( tf.keras.layers.Conv2D( final_cfg[0], final_cfg[1], final_cfg[2],", "shape == (batch_size, 64, embedding_dim) # hidden shape == (batch_size,", "attention_weights shape == (batch_size, 64, 1) # you get 1", "axis because you are applying score to self.V attention_weights =", "= tf.keras.layers.Dense( 2 * sum * (n_layer_channel), activation=tf.nn.relu) return tf.split(all_params,", "n_layer_channel): \"\"\"Generate FiLM parameters from a sentence embedding. 
Generate FiLM", "average variables target_variables: the new observations polyak_rate: rate of moving", "?, ?, d] b = tile_layer((b, [1, 1, variable_length, 1]))", "under the License is distributed on an \"AS IS\" BASIS,", "return tf.split(all_params, [c * 2 for c in n_layer_channel], 1)", "at each layer Returns: the keras model with stacked dense", "vector to scalar \"\"\" def __init__(self, units): \"\"\"Initialize Bahdanau attention", "License for the specific language governing permissions and # limitations", "n_total = sum(n_layer_channel) * 2 all_params = tf.layers.dense(sentence_embedding, n_total) all_params", "strides) padding: what kind of padding the conv layers use", "d] b = tf.keras.layers.Permute((2, 1, 3))(b) # b shape: [B,", "* 2 for c in n_layer_channel], 1) def stack_conv_layer(layer_cfg, padding='same'):", "self.gru(x) return output, state def reset_state(self, batch_size): return tf.zeros((batch_size, self._units))", "# shape: [B, ?, ?, 2*d] class BahdanauAttention(tf.keras.Model): \"\"\"Bahdanau Attention", "are applying score to self.V attention_weights = tf.nn.softmax(self.V(score), axis=1) #", "update \"\"\" updates = [] for (v_s, v_t) in zip(source_variables,", "return v1.assign(polyak_rate * v1 + (1 - polyak_rate) * v2)", "parameter each layer; each tuple should be (channel, kernel size,", "get 1 at the last axis because you are applying", "weights that process the feature w2: weights that process the", "* (n_layer_channel), activation=tf.nn.relu) return tf.split(all_params, [c * 2 for c", "self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform') def call(self, x, hidden): # x", "1, 3))(b) # b shape: [B, ?, ?, d] return", "weights that process the memory state v: projection layer that", "layer that project score vector to scalar \"\"\" def __init__(self,", "n_total) all_params = tf.keras.layers.Dense( 2 * sum * (n_layer_channel), activation=tf.nn.relu)", "\"\"\"Initialize the GRU encoder. 
Args: embedding_dim: dimension of word emebdding", "= sum(n_layer_channel) * 2 all_params = tf.layers.dense(sentence_embedding, n_total) all_params =", "using exponential moving average. Specifically, v_s' = v_s * polyak_rate", "output, state = self.gru(x) return output, state def reset_state(self, batch_size):", "shape: [B, ?, ?, 2*d] class BahdanauAttention(tf.keras.Model): \"\"\"Bahdanau Attention Layer.", "Args: layer_cfg: list of integer specifying the number of units", "updates = [] for (v_s, v_t) in zip(source_variables, target_variables): v_t.shape.assert_is_compatible_with(v_s.shape)", "* v1 + (1 - polyak_rate) * v2) update =", "context_vector = tf.reduce_sum(context_vector, axis=1) return context_vector, attention_weights class GRUEnecoder(tf.keras.Model): \"\"\"TF2.0", "= [] for cfg in layer_cfg[:-1]: layers.append( tf.keras.layers.Conv2D( filters=cfg[0], kernel_size=cfg[1],", "variables using exponential moving average. Specifically, v_s' = v_s *", "in compliance with the License. # You may obtain a", "adapted from: https://www.tensorflow.org/tutorials/text/image_captioning \"\"\" # Lint as: python3 # pylint:", "Returns: the keras model with stacked conv layers \"\"\" layers", "software # distributed under the License is distributed on an", "polyak_rate) * v2) update = update_fn(v_t, v_s) updates.append(update) return updates", "layer. Args: units: size of the dense layers \"\"\" super(BahdanauAttention,", "= attention_weights * features context_vector = tf.reduce_sum(context_vector, axis=1) return context_vector,", "x shape after passing through embedding == (batch_size, 1, embedding_dim)", "stacked conv layers \"\"\" layers = [] for cfg in", "as n_layer_channel. Each element contains all gamma_i and beta_i for", "embedding_dim) self.gru = tf.keras.layers.GRU( self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform') def call(self,", "variables do hard updates. 
return v1.assign(polyak_rate * v1 + (1", "shape: [B, ?, ?, d] return tf.keras.layers.concatenate([a, b]) # shape:", "\"\"\" def __init__(self, embedding_dim, units, vocab_size): \"\"\"Initialize the GRU encoder.", "x, hidden): # x shape after passing through embedding ==", "# you get 1 at the last axis because you", "element contains all gamma_i and beta_i for a single hidden", "features(CNN_encoder output) shape == (batch_size, 64, embedding_dim) # hidden shape", "tuple should be (channel, kernel size, strides) padding: what kind", "specifying how many channels are at each hidden layer to", "d] shape_layer = tf.keras.layers.Lambda(tf.shape) shape = shape_layer(b) shape_numpy = b.get_shape()", "padding='same'): \"\"\"Stack convolution layers per layer_cfg. Args: layer_cfg: list of", "License. \"\"\"Utilities for Tensorflow 2.0. Partially adapted from: https://www.tensorflow.org/tutorials/text/image_captioning \"\"\"", "hidden): # x shape after passing through embedding == (batch_size,", "def film_params(sentence_embedding, n_layer_channel): \"\"\"Generate FiLM parameters from a sentence embedding.", "score vector to scalar \"\"\" def __init__(self, units): \"\"\"Initialize Bahdanau", "Google Research Authors. # # Licensed under the Apache License,", "in layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return tf.keras.Sequential(layers) def soft_variables_update(source_variables, target_variables,", "of moving average Returns: Operation that does the update \"\"\"", "per layer_cfg. 
Args: layer_cfg: list of integer tuples specifying the", "filters=cfg[0], kernel_size=cfg[1], strides=cfg[2], activation=tf.nn.relu, padding=padding)) final_cfg = layer_cfg[-1] layers.append( tf.keras.layers.Conv2D(", "layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return tf.keras.Sequential(layers) def soft_variables_update(source_variables, target_variables, polyak_rate=1.0):", "specifying the number of units at each layer Returns: the", "\"\"\"TF2.0 GRE encoder. Attributes: embedding: word embedding matrix gru: the", "1, hidden_size) hidden_with_time_axis = tf.expand_dims(hidden, 1) # score shape ==", "# variable_len = ? expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) expand_dims_layer_2", "product between a and b.\"\"\" # a shape: [B, ?,", "target_variables, polyak_rate=1.0): \"\"\"Update the target variables using exponential moving average.", "integer specifying the number of units at each layer Returns:", "do hard updates. return v1.assign(polyak_rate * v1 + (1 -", "<reponame>gunpowder78/google-research # coding=utf-8 # Copyright 2022 The Google Research Authors.", "output) shape == (batch_size, 64, embedding_dim) # hidden shape ==", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "batched sentenced embedding to be transformed n_layer_channel: a list of", "[B, ?, ?, d] b = tile_layer((b, [1, 1, variable_length,", "ANY KIND, either express or implied. 
# See the License", "See the License for the specific language governing permissions and", "(1 - polyak_rate) * v2) update = update_fn(v_t, v_s) updates.append(update)", "b = expand_dims_layer_2(b) # a shape: [B, ?, 1, d]", "layers \"\"\" layers = [] for cfg in layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg,", "b shape: [B, ?, ?, d] return tf.keras.layers.concatenate([a, b]) #", "= tf.expand_dims(hidden, 1) # score shape == (batch_size, 64, hidden_size)", "layer_cfg[-1] layers.append( tf.keras.layers.Conv2D( final_cfg[0], final_cfg[1], final_cfg[2], padding=padding)) return tf.keras.Sequential(layers) def", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "b = tf.keras.layers.Permute((2, 1, 3))(b) # b shape: [B, ?,", "layers \"\"\" super(BahdanauAttention, self).__init__() self.W1 = tf.keras.layers.Dense(units) self.W2 = tf.keras.layers.Dense(units)", "to in writing, software # distributed under the License is", "embedding_dim) x = self.embedding(x) # passing the concatenated vector to", "layer; each tuple should be (channel, kernel size, strides) padding:", "context_vector, attention_weights class GRUEnecoder(tf.keras.Model): \"\"\"TF2.0 GRE encoder. 
Attributes: embedding: word", "# See the License for the specific language governing permissions", "FiLM'ed Returns: a tuple of tensors the same length as", "= tf.keras.layers.Dense(1) def call(self, features, hidden): # features(CNN_encoder output) shape", "for (v_s, v_t) in zip(source_variables, target_variables): v_t.shape.assert_is_compatible_with(v_s.shape) def update_fn(v1, v2):", "or agreed to in writing, software # distributed under the", "final_cfg[1], final_cfg[2], padding=padding)) return tf.keras.Sequential(layers) def stack_dense_layer(layer_cfg): \"\"\"Stack Dense layers.", "transformed n_layer_channel: a list of integers specifying how many channels", "required by applicable law or agreed to in writing, software", "kernel size, strides) padding: what kind of padding the conv", "as: python3 # pylint: disable=invalid-name from __future__ import absolute_import from", "Operation that does the update \"\"\" updates = [] for", "b shape: [B, ?, d] shape_layer = tf.keras.layers.Lambda(tf.shape) shape =", "tf.tile(inputs[0], multiples=inputs[1])) a = tile_layer((a, [1, 1, variable_length, 1])) #", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "\"\"\"Stack convolution layers per layer_cfg. Args: layer_cfg: list of integer", "with the License. # You may obtain a copy of", "variable_length = shape[1] # variable_len = ? expand_dims_layer_1 = tf.keras.layers.Reshape((-1,", "self.embedding(x) # passing the concatenated vector to the GRU output,", "the new observations polyak_rate: rate of moving average Returns: Operation", "* 2 all_params = tf.layers.dense(sentence_embedding, n_total) all_params = tf.keras.layers.Dense( 2", "vector_tensor_product(a, b): \"\"\"\"Returns keras layer that perfrom a outer product", "of integers specifying how many channels are at each hidden", "BahdanauAttention(tf.keras.Model): \"\"\"Bahdanau Attention Layer. Attributes: w1: weights that process the", "same length as n_layer_channel. 
Each element contains all gamma_i and", "a shape: [B, ?, ?, d] b = tile_layer((b, [1,", "(batch_size, hidden_size) # hidden_with_time_axis shape == (batch_size, 1, hidden_size) hidden_with_time_axis", "the same length as n_layer_channel. Each element contains all gamma_i", "a tuple of tensors the same length as n_layer_channel. Each", "tf.keras.layers.Lambda(tf.shape) shape = shape_layer(b) shape_numpy = b.get_shape() variable_length = shape[1]", "b): \"\"\"\"Returns keras layer that perfrom a outer product between", "list of integers specifying how many channels are at each", "many channels are at each hidden layer to be FiLM'ed", "64, hidden_size) score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis)) # attention_weights shape", "all_params = tf.keras.layers.Dense( 2 * sum * (n_layer_channel), activation=tf.nn.relu) return", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "embedding. This method assumes a batch dimension exists. Args: sentence_embedding:", "Returns: Operation that does the update \"\"\" updates = []", "2 all_params = tf.layers.dense(sentence_embedding, n_total) all_params = tf.keras.layers.Dense( 2 *", "distributed under the License is distributed on an \"AS IS\"", "expand_dims_layer_1(a) # a shape: [B, ?, 1, d] b =", "b shape: [B, ?, ?, d] b = tf.keras.layers.Permute((2, 1,", "self.W1 = tf.keras.layers.Dense(units) self.W2 = tf.keras.layers.Dense(units) self.V = tf.keras.layers.Dense(1) def", "Bahdanau attention layer. 
Args: units: size of the dense layers", "sentence_embedding: a tensor containing batched sentenced embedding to be transformed", "word embedding matrix gru: the GRU layer \"\"\" def __init__(self,", "= [] for (v_s, v_t) in zip(source_variables, target_variables): v_t.shape.assert_is_compatible_with(v_s.shape) def", "specifying the parameter each layer; each tuple should be (channel,", "target_variables: the new observations polyak_rate: rate of moving average Returns:", "process the feature w2: weights that process the memory state", "self.W2(hidden_with_time_axis)) # attention_weights shape == (batch_size, 64, 1) # you", "exists. Args: sentence_embedding: a tensor containing batched sentenced embedding to", "\"\"\" layers = [] for cfg in layer_cfg[:-1]: layers.append( tf.keras.layers.Conv2D(", "layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return tf.keras.Sequential(layers) def soft_variables_update(source_variables, target_variables, polyak_rate=1.0): \"\"\"Update the target", "2*d] class BahdanauAttention(tf.keras.Model): \"\"\"Bahdanau Attention Layer. Attributes: w1: weights that", "variable_len = ? expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) expand_dims_layer_2 =", "express or implied. # See the License for the specific", "# limitations under the License. \"\"\"Utilities for Tensorflow 2.0. Partially", "except in compliance with the License. 
# You may obtain", "hidden_size) context_vector = attention_weights * features context_vector = tf.reduce_sum(context_vector, axis=1)", "[B, ?, d] shape_layer = tf.keras.layers.Lambda(tf.shape) shape = shape_layer(b) shape_numpy", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "the number of units at each layer Returns: the keras", "return_state=True, recurrent_initializer='glorot_uniform') def call(self, x, hidden): # x shape after", "a shape: [B, ?, d], b shape: [B, ?, d]", "padding: what kind of padding the conv layers use Returns:", "not use this file except in compliance with the License.", "tf.nn.softmax(self.V(score), axis=1) # context_vector shape after sum == (batch_size, hidden_size)", "attention layer. Args: units: size of the dense layers \"\"\"", "Each element contains all gamma_i and beta_i for a single", "a sentence embedding. This method assumes a batch dimension exists.", "writing, software # distributed under the License is distributed on", "Partially adapted from: https://www.tensorflow.org/tutorials/text/image_captioning \"\"\" # Lint as: python3 #", "n_layer_channel. Each element contains all gamma_i and beta_i for a", "state = self.gru(x) return output, state def reset_state(self, batch_size): return", "super(BahdanauAttention, self).__init__() self.W1 = tf.keras.layers.Dense(units) self.W2 = tf.keras.layers.Dense(units) self.V =", "you may not use this file except in compliance with", "layers use Returns: the keras model with stacked conv layers", "passing through embedding == (batch_size, 1, embedding_dim) x = self.embedding(x)", "GRUEnecoder(tf.keras.Model): \"\"\"TF2.0 GRE encoder. 
Attributes: embedding: word embedding matrix gru:", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "return tf.keras.layers.concatenate([a, b]) # shape: [B, ?, ?, 2*d] class", "sum(n_layer_channel) * 2 all_params = tf.layers.dense(sentence_embedding, n_total) all_params = tf.keras.layers.Dense(", "= tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis)) # attention_weights shape == (batch_size, 64,", "expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1]))", "\"\"\" def __init__(self, units): \"\"\"Initialize Bahdanau attention layer. Args: units:", "2022 The Google Research Authors. # # Licensed under the", "tf.keras.layers.Dense( 2 * sum * (n_layer_channel), activation=tf.nn.relu) return tf.split(all_params, [c", "tf.keras.layers.Conv2D( filters=cfg[0], kernel_size=cfg[1], strides=cfg[2], activation=tf.nn.relu, padding=padding)) final_cfg = layer_cfg[-1] layers.append(", "= tf.keras.layers.Embedding(vocab_size, embedding_dim) self.gru = tf.keras.layers.GRU( self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform')", "hard updates. return v1.assign(polyak_rate * v1 + (1 - polyak_rate)", "variable_length, 1])) # a shape: [B, ?, ?, d] b", "+ self.W2(hidden_with_time_axis)) # attention_weights shape == (batch_size, 64, 1) #", "pylint: disable=invalid-name from __future__ import absolute_import from __future__ import division", "+ (1-polyak_rate) * v_t Args: source_variables: the moving average variables", "CONDITIONS OF ANY KIND, either express or implied. # See", "you get 1 at the last axis because you are", "\"\"\"Utilities for Tensorflow 2.0. 
Partially adapted from: https://www.tensorflow.org/tutorials/text/image_captioning \"\"\" #", "2 for c in n_layer_channel], 1) def stack_conv_layer(layer_cfg, padding='same'): \"\"\"Stack", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "# a shape: [B, ?, 1, d] tile_layer = tf.keras.layers.Lambda(", "1])) # b shape: [B, ?, ?, d] b =", "Attributes: embedding: word embedding matrix gru: the GRU layer \"\"\"", "def call(self, features, hidden): # features(CNN_encoder output) shape == (batch_size,", "hidden_size) # hidden_with_time_axis shape == (batch_size, 1, hidden_size) hidden_with_time_axis =", "the feature w2: weights that process the memory state v:", "single hidden layer. \"\"\" n_total = sum(n_layer_channel) * 2 all_params", "score shape == (batch_size, 64, hidden_size) score = tf.nn.tanh(self.W1(features) +", "does the update \"\"\" updates = [] for (v_s, v_t)", "for cfg in layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return tf.keras.Sequential(layers) def", "rate of moving average Returns: Operation that does the update", "length as n_layer_channel. Each element contains all gamma_i and beta_i", "memory state vocab_size: total number of vocabulary \"\"\" super(GRUEnecoder, self).__init__()", "tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis)) # attention_weights shape == (batch_size, 64, 1)", "?, 1, d] tile_layer = tf.keras.layers.Lambda( lambda inputs: tf.tile(inputs[0], multiples=inputs[1]))", "1, d] tile_layer = tf.keras.layers.Lambda( lambda inputs: tf.tile(inputs[0], multiples=inputs[1])) a", "tuple of tensors the same length as n_layer_channel. Each element", "padding=padding)) return tf.keras.Sequential(layers) def stack_dense_layer(layer_cfg): \"\"\"Stack Dense layers. 
Args: layer_cfg:", "?, d] b = tf.keras.layers.Permute((2, 1, 3))(b) # b shape:", "# b shape: [B, ?, ?, d] return tf.keras.layers.concatenate([a, b])", "= shape[1] # variable_len = ? expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1,", "hidden layer. \"\"\" n_total = sum(n_layer_channel) * 2 all_params =", "self.V attention_weights = tf.nn.softmax(self.V(score), axis=1) # context_vector shape after sum", "units self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim) self.gru = tf.keras.layers.GRU( self.units, return_sequences=True,", "* features context_vector = tf.reduce_sum(context_vector, axis=1) return context_vector, attention_weights class", "layer_cfg[:-1]: layers.append( tf.keras.layers.Conv2D( filters=cfg[0], kernel_size=cfg[1], strides=cfg[2], activation=tf.nn.relu, padding=padding)) final_cfg =", "# attention_weights shape == (batch_size, 64, 1) # you get", "\"\"\" super(GRUEnecoder, self).__init__() self._units = units self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim)", "Args: embedding_dim: dimension of word emebdding units: number of units", "kernel_size=cfg[1], strides=cfg[2], activation=tf.nn.relu, padding=padding)) final_cfg = layer_cfg[-1] layers.append( tf.keras.layers.Conv2D( final_cfg[0],", "d], b shape: [B, ?, d] shape_layer = tf.keras.layers.Lambda(tf.shape) shape", "OR CONDITIONS OF ANY KIND, either express or implied. #", "layer_cfg: list of integer specifying the number of units at", "* sum * (n_layer_channel), activation=tf.nn.relu) return tf.split(all_params, [c * 2", "shape == (batch_size, 1, hidden_size) hidden_with_time_axis = tf.expand_dims(hidden, 1) #", "lambda inputs: tf.tile(inputs[0], multiples=inputs[1])) a = tile_layer((a, [1, 1, variable_length,", "= tf.keras.layers.Dense(units) self.W2 = tf.keras.layers.Dense(units) self.V = tf.keras.layers.Dense(1) def call(self,", "the License is distributed on an \"AS IS\" BASIS, #", "units): \"\"\"Initialize Bahdanau attention layer. 
Args: units: size of the", "score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis)) # attention_weights shape == (batch_size,", "you are applying score to self.V attention_weights = tf.nn.softmax(self.V(score), axis=1)", "def __init__(self, embedding_dim, units, vocab_size): \"\"\"Initialize the GRU encoder. Args:", "features, hidden): # features(CNN_encoder output) shape == (batch_size, 64, embedding_dim)", "hidden): # features(CNN_encoder output) shape == (batch_size, 64, embedding_dim) #", "self._units = units self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim) self.gru = tf.keras.layers.GRU(", "from a sentence embedding. This method assumes a batch dimension", "v_t Args: source_variables: the moving average variables target_variables: the new", "?, d] shape_layer = tf.keras.layers.Lambda(tf.shape) shape = shape_layer(b) shape_numpy =", "# x shape after passing through embedding == (batch_size, 1,", "that process the feature w2: weights that process the memory", "\"\"\" n_total = sum(n_layer_channel) * 2 all_params = tf.layers.dense(sentence_embedding, n_total)", "to self.V attention_weights = tf.nn.softmax(self.V(score), axis=1) # context_vector shape after", "beta_i for a single hidden layer. \"\"\" n_total = sum(n_layer_channel)", "at each hidden layer to be FiLM'ed Returns: a tuple", "for c in n_layer_channel], 1) def stack_conv_layer(layer_cfg, padding='same'): \"\"\"Stack convolution", "b = tile_layer((b, [1, 1, variable_length, 1])) # b shape:", "1) def stack_conv_layer(layer_cfg, padding='same'): \"\"\"Stack convolution layers per layer_cfg. Args:", "1])) # a shape: [B, ?, ?, d] b =", "update = update_fn(v_t, v_s) updates.append(update) return updates def vector_tensor_product(a, b):", "layer_cfg. 
Args: layer_cfg: list of integer tuples specifying the parameter", "the memory state vocab_size: total number of vocabulary \"\"\" super(GRUEnecoder,", "1, embedding_dim) x = self.embedding(x) # passing the concatenated vector", "moving average variables target_variables: the new observations polyak_rate: rate of", "= shape_layer(b) shape_numpy = b.get_shape() variable_length = shape[1] # variable_len", "law or agreed to in writing, software # distributed under", "[1, 1, variable_length, 1])) # a shape: [B, ?, ?,", "of units at each layer Returns: the keras model with", "contains all gamma_i and beta_i for a single hidden layer.", "a shape: [B, ?, 1, d] tile_layer = tf.keras.layers.Lambda( lambda", "attention_weights * features context_vector = tf.reduce_sum(context_vector, axis=1) return context_vector, attention_weights", "all_params = tf.layers.dense(sentence_embedding, n_total) all_params = tf.keras.layers.Dense( 2 * sum", "encoder. Args: embedding_dim: dimension of word emebdding units: number of", "context_vector = attention_weights * features context_vector = tf.reduce_sum(context_vector, axis=1) return", "tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) a = expand_dims_layer_1(a) # a shape: [B,", "as tf def film_params(sentence_embedding, n_layer_channel): \"\"\"Generate FiLM parameters from a", "shape_numpy = b.get_shape() variable_length = shape[1] # variable_len = ?", "embedding: word embedding matrix gru: the GRU layer \"\"\" def", "def __init__(self, units): \"\"\"Initialize Bahdanau attention layer. Args: units: size", "governing permissions and # limitations under the License. \"\"\"Utilities for", "method assumes a batch dimension exists. 
Args: sentence_embedding: a tensor", "shape_numpy[-1])) a = expand_dims_layer_1(a) # a shape: [B, ?, 1,", "tile_layer((a, [1, 1, variable_length, 1])) # a shape: [B, ?,", "(1-polyak_rate) * v_t Args: source_variables: the moving average variables target_variables:", "= tf.keras.layers.Lambda(tf.shape) shape = shape_layer(b) shape_numpy = b.get_shape() variable_length =", "may obtain a copy of the License at # #", "b.\"\"\" # a shape: [B, ?, d], b shape: [B,", "after sum == (batch_size, hidden_size) context_vector = attention_weights * features", "keras model with stacked dense layers \"\"\" layers = []", "at the last axis because you are applying score to", "?, ?, 2*d] class BahdanauAttention(tf.keras.Model): \"\"\"Bahdanau Attention Layer. Attributes: w1:", "the update \"\"\" updates = [] for (v_s, v_t) in", "# pylint: disable=invalid-name from __future__ import absolute_import from __future__ import", "return updates def vector_tensor_product(a, b): \"\"\"\"Returns keras layer that perfrom", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "shape after passing through embedding == (batch_size, 1, embedding_dim) x", "with stacked dense layers \"\"\" layers = [] for cfg", "shape_numpy[-1])) expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) a = expand_dims_layer_1(a) #", "updates. 
return v1.assign(polyak_rate * v1 + (1 - polyak_rate) *", "# a shape: [B, ?, d], b shape: [B, ?,", "[B, ?, ?, d] return tf.keras.layers.concatenate([a, b]) # shape: [B,", "with stacked conv layers \"\"\" layers = [] for cfg", "may not use this file except in compliance with the", "call(self, features, hidden): # features(CNN_encoder output) shape == (batch_size, 64,", "# Lint as: python3 # pylint: disable=invalid-name from __future__ import", "final_cfg = layer_cfg[-1] layers.append( tf.keras.layers.Conv2D( final_cfg[0], final_cfg[1], final_cfg[2], padding=padding)) return", "multiples=inputs[1])) a = tile_layer((a, [1, 1, variable_length, 1])) # a", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "[] for (v_s, v_t) in zip(source_variables, target_variables): v_t.shape.assert_is_compatible_with(v_s.shape) def update_fn(v1,", "source_variables: the moving average variables target_variables: the new observations polyak_rate:", "this file except in compliance with the License. # You", "between a and b.\"\"\" # a shape: [B, ?, d],", "\"\"\" # Lint as: python3 # pylint: disable=invalid-name from __future__", "a and b.\"\"\" # a shape: [B, ?, d], b", "\"\"\" updates = [] for (v_s, v_t) in zip(source_variables, target_variables):", "attention_weights = tf.nn.softmax(self.V(score), axis=1) # context_vector shape after sum ==", "Attention Layer. Attributes: w1: weights that process the feature w2:", "d] tile_layer = tf.keras.layers.Lambda( lambda inputs: tf.tile(inputs[0], multiples=inputs[1])) a =", "= tile_layer((a, [1, 1, variable_length, 1])) # a shape: [B,", "the last axis because you are applying score to self.V", "= b.get_shape() variable_length = shape[1] # variable_len = ? 
expand_dims_layer_1", "Args: source_variables: the moving average variables target_variables: the new observations", "tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) a =", "that process the memory state v: projection layer that project", "each hidden layer to be FiLM'ed Returns: a tuple of", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "conv layers \"\"\" layers = [] for cfg in layer_cfg[:-1]:", "tensors the same length as n_layer_channel. Each element contains all", "(batch_size, 64, hidden_size) score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis)) # attention_weights", "# # Licensed under the Apache License, Version 2.0 (the", "\"\"\"Generate FiLM parameters from a sentence embedding. Generate FiLM parameters", "of word emebdding units: number of units of the memory", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "division import tensorflow as tf def film_params(sentence_embedding, n_layer_channel): \"\"\"Generate FiLM", "state vocab_size: total number of vocabulary \"\"\" super(GRUEnecoder, self).__init__() self._units", "64, embedding_dim) # hidden shape == (batch_size, hidden_size) # hidden_with_time_axis", "tf.reduce_sum(context_vector, axis=1) return context_vector, attention_weights class GRUEnecoder(tf.keras.Model): \"\"\"TF2.0 GRE encoder.", "(v_s, v_t) in zip(source_variables, target_variables): v_t.shape.assert_is_compatible_with(v_s.shape) def update_fn(v1, v2): \"\"\"Update", "import absolute_import from __future__ import division import tensorflow as tf", "features context_vector = tf.reduce_sum(context_vector, axis=1) return context_vector, attention_weights class GRUEnecoder(tf.keras.Model):", "[] for cfg in layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return 
tf.keras.Sequential(layers)", "= tf.layers.dense(sentence_embedding, n_total) all_params = tf.keras.layers.Dense( 2 * sum *", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "of integer specifying the number of units at each layer", "(batch_size, 64, embedding_dim) # hidden shape == (batch_size, hidden_size) #", "This method assumes a batch dimension exists. Args: sentence_embedding: a", "be transformed n_layer_channel: a list of integers specifying how many", "and beta_i for a single hidden layer. \"\"\" n_total =", "Returns: a tuple of tensors the same length as n_layer_channel.", "return tf.keras.Sequential(layers) def soft_variables_update(source_variables, target_variables, polyak_rate=1.0): \"\"\"Update the target variables", "class GRUEnecoder(tf.keras.Model): \"\"\"TF2.0 GRE encoder. Attributes: embedding: word embedding matrix", "= self.gru(x) return output, state def reset_state(self, batch_size): return tf.zeros((batch_size,", "* v_t Args: source_variables: the moving average variables target_variables: the", "after passing through embedding == (batch_size, 1, embedding_dim) x =", "[c * 2 for c in n_layer_channel], 1) def stack_conv_layer(layer_cfg,", "(n_layer_channel), activation=tf.nn.relu) return tf.split(all_params, [c * 2 for c in", "average. Specifically, v_s' = v_s * polyak_rate + (1-polyak_rate) *", "that perfrom a outer product between a and b.\"\"\" #", "(batch_size, hidden_size) context_vector = attention_weights * features context_vector = tf.reduce_sum(context_vector,", "[B, ?, ?, 2*d] class BahdanauAttention(tf.keras.Model): \"\"\"Bahdanau Attention Layer. Attributes:", "embedding == (batch_size, 1, embedding_dim) x = self.embedding(x) # passing", "v_t) in zip(source_variables, target_variables): v_t.shape.assert_is_compatible_with(v_s.shape) def update_fn(v1, v2): \"\"\"Update variables.\"\"\"", "1, variable_length, 1])) # b shape: [B, ?, ?, d]", "64, 1) # you get 1 at the last axis", "shape[1] # variable_len = ? 
expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1]))", "sum == (batch_size, hidden_size) context_vector = attention_weights * features context_vector", "1) # you get 1 at the last axis because", "batch dimension exists. Args: sentence_embedding: a tensor containing batched sentenced", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "layers per layer_cfg. Args: layer_cfg: list of integer tuples specifying", "d] return tf.keras.layers.concatenate([a, b]) # shape: [B, ?, ?, 2*d]", "the keras model with stacked dense layers \"\"\" layers =", "the conv layers use Returns: the keras model with stacked", "or implied. # See the License for the specific language", "sentence embedding. This method assumes a batch dimension exists. Args:", "expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) a = expand_dims_layer_1(a) # a", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "hidden_size) hidden_with_time_axis = tf.expand_dims(hidden, 1) # score shape == (batch_size,", "vocabulary \"\"\" super(GRUEnecoder, self).__init__() self._units = units self.embedding = tf.keras.layers.Embedding(vocab_size,", "the GRU encoder. Args: embedding_dim: dimension of word emebdding units:", "v_s' = v_s * polyak_rate + (1-polyak_rate) * v_t Args:", "?, 2*d] class BahdanauAttention(tf.keras.Model): \"\"\"Bahdanau Attention Layer. Attributes: w1: weights", "3))(b) # b shape: [B, ?, ?, d] return tf.keras.layers.concatenate([a,", "= self.embedding(x) # passing the concatenated vector to the GRU", "tensor containing batched sentenced embedding to be transformed n_layer_channel: a", "padding=padding)) final_cfg = layer_cfg[-1] layers.append( tf.keras.layers.Conv2D( final_cfg[0], final_cfg[1], final_cfg[2], padding=padding))", "layer. 
\"\"\" n_total = sum(n_layer_channel) * 2 all_params = tf.layers.dense(sentence_embedding,", "n_layer_channel: a list of integers specifying how many channels are", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "gru: the GRU layer \"\"\" def __init__(self, embedding_dim, units, vocab_size):", "layers. Args: layer_cfg: list of integer specifying the number of", "memory state v: projection layer that project score vector to", "from __future__ import division import tensorflow as tf def film_params(sentence_embedding,", "layer \"\"\" def __init__(self, embedding_dim, units, vocab_size): \"\"\"Initialize the GRU", "to the GRU output, state = self.gru(x) return output, state", "final_cfg[0], final_cfg[1], final_cfg[2], padding=padding)) return tf.keras.Sequential(layers) def stack_dense_layer(layer_cfg): \"\"\"Stack Dense", "self).__init__() self.W1 = tf.keras.layers.Dense(units) self.W2 = tf.keras.layers.Dense(units) self.V = tf.keras.layers.Dense(1)", "soft_variables_update(source_variables, target_variables, polyak_rate=1.0): \"\"\"Update the target variables using exponential moving", "(batch_size, 1, embedding_dim) x = self.embedding(x) # passing the concatenated", "projection layer that project score vector to scalar \"\"\" def", "hidden_size) score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis)) # attention_weights shape ==", "layers = [] for cfg in layer_cfg[:-1]: layers.append( tf.keras.layers.Conv2D( filters=cfg[0],", "total number of vocabulary \"\"\" super(GRUEnecoder, self).__init__() self._units = units", "(the \"License\"); # you may not use this file except", "assumes a batch dimension exists. Args: sentence_embedding: a tensor containing", "+ (1 - polyak_rate) * v2) update = update_fn(v_t, v_s)", "a = tile_layer((a, [1, 1, variable_length, 1])) # a shape:", "# you may not use this file except in compliance", "coding=utf-8 # Copyright 2022 The Google Research Authors. 
# #", "[] for cfg in layer_cfg[:-1]: layers.append( tf.keras.layers.Conv2D( filters=cfg[0], kernel_size=cfg[1], strides=cfg[2],", "? expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1,", "to be transformed n_layer_channel: a list of integers specifying how", "# b shape: [B, ?, ?, d] b = tf.keras.layers.Permute((2,", "[B, ?, 1, d] b = expand_dims_layer_2(b) # a shape:", "tf.keras.layers.Dense(units) self.V = tf.keras.layers.Dense(1) def call(self, features, hidden): # features(CNN_encoder", "1, shape_numpy[-1])) a = expand_dims_layer_1(a) # a shape: [B, ?,", "dimension of word emebdding units: number of units of the", "Args: layer_cfg: list of integer tuples specifying the parameter each", "state v: projection layer that project score vector to scalar", "emebdding units: number of units of the memory state vocab_size:", "= ? expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) expand_dims_layer_2 = tf.keras.layers.Reshape((-1,", "zip(source_variables, target_variables): v_t.shape.assert_is_compatible_with(v_s.shape) def update_fn(v1, v2): \"\"\"Update variables.\"\"\" # For", "d] b = tile_layer((b, [1, 1, variable_length, 1])) # b", "tf.keras.layers.Embedding(vocab_size, embedding_dim) self.gru = tf.keras.layers.GRU( self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform') def", "a sentence embedding. Generate FiLM parameters from a sentence embedding.", "- polyak_rate) * v2) update = update_fn(v_t, v_s) updates.append(update) return", "a single hidden layer. \"\"\" n_total = sum(n_layer_channel) * 2", "# # Unless required by applicable law or agreed to", "the target variables using exponential moving average. 
Specifically, v_s' =", "containing batched sentenced embedding to be transformed n_layer_channel: a list", "hidden_with_time_axis shape == (batch_size, 1, hidden_size) hidden_with_time_axis = tf.expand_dims(hidden, 1)", "shape == (batch_size, hidden_size) # hidden_with_time_axis shape == (batch_size, 1,", "stack_conv_layer(layer_cfg, padding='same'): \"\"\"Stack convolution layers per layer_cfg. Args: layer_cfg: list", "b]) # shape: [B, ?, ?, 2*d] class BahdanauAttention(tf.keras.Model): \"\"\"Bahdanau", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "feature w2: weights that process the memory state v: projection", "scalar \"\"\" def __init__(self, units): \"\"\"Initialize Bahdanau attention layer. Args:", "2 * sum * (n_layer_channel), activation=tf.nn.relu) return tf.split(all_params, [c *", "Version 2.0 (the \"License\"); # you may not use this", "== (batch_size, 1, hidden_size) hidden_with_time_axis = tf.expand_dims(hidden, 1) # score", "shape: [B, ?, 1, d] tile_layer = tf.keras.layers.Lambda( lambda inputs:", "layer to be FiLM'ed Returns: a tuple of tensors the", "def stack_dense_layer(layer_cfg): \"\"\"Stack Dense layers. Args: layer_cfg: list of integer", "tf.keras.layers.Lambda( lambda inputs: tf.tile(inputs[0], multiples=inputs[1])) a = tile_layer((a, [1, 1,", "\"\"\"Bahdanau Attention Layer. Attributes: w1: weights that process the feature", "# hidden_with_time_axis shape == (batch_size, 1, hidden_size) hidden_with_time_axis = tf.expand_dims(hidden,", "# a shape: [B, ?, 1, d] b = expand_dims_layer_2(b)", "(channel, kernel size, strides) padding: what kind of padding the", "update_fn(v_t, v_s) updates.append(update) return updates def vector_tensor_product(a, b): \"\"\"\"Returns keras", "for a single hidden layer. 
\"\"\" n_total = sum(n_layer_channel) *", "shape_layer(b) shape_numpy = b.get_shape() variable_length = shape[1] # variable_len =", "# features(CNN_encoder output) shape == (batch_size, 64, embedding_dim) # hidden", "= tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) a", "\"\"\"Update the target variables using exponential moving average. Specifically, v_s'", "GRU output, state = self.gru(x) return output, state def reset_state(self,", "update_fn(v1, v2): \"\"\"Update variables.\"\"\" # For not trainable variables do", "implied. # See the License for the specific language governing", "layers.append( tf.keras.layers.Conv2D( final_cfg[0], final_cfg[1], final_cfg[2], padding=padding)) return tf.keras.Sequential(layers) def stack_dense_layer(layer_cfg):", "exponential moving average. Specifically, v_s' = v_s * polyak_rate +", "super(GRUEnecoder, self).__init__() self._units = units self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim) self.gru", "embedding to be transformed n_layer_channel: a list of integers specifying", "under the Apache License, Version 2.0 (the \"License\"); # you", "embedding. Generate FiLM parameters from a sentence embedding. This method", "dense layers \"\"\" layers = [] for cfg in layer_cfg[:-1]:", "= v_s * polyak_rate + (1-polyak_rate) * v_t Args: source_variables:", "from a sentence embedding. 
Generate FiLM parameters from a sentence", "= tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) a = expand_dims_layer_1(a) # a shape:", "v_s) updates.append(update) return updates def vector_tensor_product(a, b): \"\"\"\"Returns keras layer", "Lint as: python3 # pylint: disable=invalid-name from __future__ import absolute_import", "self.W2 = tf.keras.layers.Dense(units) self.V = tf.keras.layers.Dense(1) def call(self, features, hidden):", "layer_cfg: list of integer tuples specifying the parameter each layer;", "v2) update = update_fn(v_t, v_s) updates.append(update) return updates def vector_tensor_product(a,", "shape: [B, ?, 1, d] b = expand_dims_layer_2(b) # a", "by applicable law or agreed to in writing, software #", "size of the dense layers \"\"\" super(BahdanauAttention, self).__init__() self.W1 =", "polyak_rate + (1-polyak_rate) * v_t Args: source_variables: the moving average", "embedding_dim) # hidden shape == (batch_size, hidden_size) # hidden_with_time_axis shape", "applying score to self.V attention_weights = tf.nn.softmax(self.V(score), axis=1) # context_vector", "language governing permissions and # limitations under the License. \"\"\"Utilities", "list of integer specifying the number of units at each", "that does the update \"\"\" updates = [] for (v_s,", "of the dense layers \"\"\" super(BahdanauAttention, self).__init__() self.W1 = tf.keras.layers.Dense(units)", "expand_dims_layer_2(b) # a shape: [B, ?, 1, d] tile_layer =", "limitations under the License. \"\"\"Utilities for Tensorflow 2.0. 
Partially adapted", "perfrom a outer product between a and b.\"\"\" # a", "size, strides) padding: what kind of padding the conv layers", "number of vocabulary \"\"\" super(GRUEnecoder, self).__init__() self._units = units self.embedding", "GRU layer \"\"\" def __init__(self, embedding_dim, units, vocab_size): \"\"\"Initialize the", "vocab_size: total number of vocabulary \"\"\" super(GRUEnecoder, self).__init__() self._units =", "tile_layer = tf.keras.layers.Lambda( lambda inputs: tf.tile(inputs[0], multiples=inputs[1])) a = tile_layer((a,", "through embedding == (batch_size, 1, embedding_dim) x = self.embedding(x) #", "under the License. \"\"\"Utilities for Tensorflow 2.0. Partially adapted from:", "activation=tf.nn.relu) return tf.split(all_params, [c * 2 for c in n_layer_channel],", "Specifically, v_s' = v_s * polyak_rate + (1-polyak_rate) * v_t", "python3 # pylint: disable=invalid-name from __future__ import absolute_import from __future__", "units at each layer Returns: the keras model with stacked", "Dense layers. Args: layer_cfg: list of integer specifying the number", "Tensorflow 2.0. Partially adapted from: https://www.tensorflow.org/tutorials/text/image_captioning \"\"\" # Lint as:", "def update_fn(v1, v2): \"\"\"Update variables.\"\"\" # For not trainable variables", "v2): \"\"\"Update variables.\"\"\" # For not trainable variables do hard", "be (channel, kernel size, strides) padding: what kind of padding", "to scalar \"\"\" def __init__(self, units): \"\"\"Initialize Bahdanau attention layer.", "Research Authors. 
# # Licensed under the Apache License, Version", "self).__init__() self._units = units self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim) self.gru =", "cfg in layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return tf.keras.Sequential(layers) def soft_variables_update(source_variables,", "== (batch_size, hidden_size) context_vector = attention_weights * features context_vector =", "\"\"\" super(BahdanauAttention, self).__init__() self.W1 = tf.keras.layers.Dense(units) self.W2 = tf.keras.layers.Dense(units) self.V", "of units of the memory state vocab_size: total number of", "model with stacked dense layers \"\"\" layers = [] for", "strides=cfg[2], activation=tf.nn.relu, padding=padding)) final_cfg = layer_cfg[-1] layers.append( tf.keras.layers.Conv2D( final_cfg[0], final_cfg[1],", "stacked dense layers \"\"\" layers = [] for cfg in", "tf.split(all_params, [c * 2 for c in n_layer_channel], 1) def", "not trainable variables do hard updates. return v1.assign(polyak_rate * v1", "FiLM parameters from a sentence embedding. 
This method assumes a", "tf.keras.layers.concatenate([a, b]) # shape: [B, ?, ?, 2*d] class BahdanauAttention(tf.keras.Model):", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return tf.keras.Sequential(layers) def soft_variables_update(source_variables, target_variables, polyak_rate=1.0): \"\"\"Update", "the GRU layer \"\"\" def __init__(self, embedding_dim, units, vocab_size): \"\"\"Initialize", "what kind of padding the conv layers use Returns: the", "keras layer that perfrom a outer product between a and", "Unless required by applicable law or agreed to in writing,", "tf.expand_dims(hidden, 1) # score shape == (batch_size, 64, hidden_size) score", "the parameter each layer; each tuple should be (channel, kernel", "each layer; each tuple should be (channel, kernel size, strides)", "sentenced embedding to be transformed n_layer_channel: a list of integers", "the specific language governing permissions and # limitations under the", "= layer_cfg[-1] layers.append( tf.keras.layers.Conv2D( final_cfg[0], final_cfg[1], final_cfg[2], padding=padding)) return tf.keras.Sequential(layers)", "hidden shape == (batch_size, hidden_size) # hidden_with_time_axis shape == (batch_size,", "of integer tuples specifying the parameter each layer; each tuple", "model with stacked conv layers \"\"\" layers = [] for", "* v2) update = update_fn(v_t, v_s) updates.append(update) return updates def", "number of units of the memory state vocab_size: total number", "shape: [B, ?, d], b shape: [B, ?, d] shape_layer", "applicable law or agreed to in writing, software # distributed", "variable_length, 1])) # b shape: [B, ?, ?, d] b", "tuples specifying the parameter each layer; each tuple should be", "tf.keras.layers.Conv2D( final_cfg[0], final_cfg[1], final_cfg[2], padding=padding)) return tf.keras.Sequential(layers) def stack_dense_layer(layer_cfg): \"\"\"Stack", 
"v1.assign(polyak_rate * v1 + (1 - polyak_rate) * v2) update", "tensorflow as tf def film_params(sentence_embedding, n_layer_channel): \"\"\"Generate FiLM parameters from", "== (batch_size, 64, hidden_size) score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis)) #", "# coding=utf-8 # Copyright 2022 The Google Research Authors. #", "layer that perfrom a outer product between a and b.\"\"\"", "that project score vector to scalar \"\"\" def __init__(self, units):", "# a shape: [B, ?, ?, d] b = tile_layer((b,", "a shape: [B, ?, 1, d] b = expand_dims_layer_2(b) #", "?, ?, d] b = tf.keras.layers.Permute((2, 1, 3))(b) # b", "== (batch_size, hidden_size) # hidden_with_time_axis shape == (batch_size, 1, hidden_size)", "from: https://www.tensorflow.org/tutorials/text/image_captioning \"\"\" # Lint as: python3 # pylint: disable=invalid-name", "return context_vector, attention_weights class GRUEnecoder(tf.keras.Model): \"\"\"TF2.0 GRE encoder. Attributes: embedding:", "gamma_i and beta_i for a single hidden layer. \"\"\" n_total", "in writing, software # distributed under the License is distributed", "__init__(self, embedding_dim, units, vocab_size): \"\"\"Initialize the GRU encoder. Args: embedding_dim:", "v_t.shape.assert_is_compatible_with(v_s.shape) def update_fn(v1, v2): \"\"\"Update variables.\"\"\" # For not trainable", "layers \"\"\" layers = [] for cfg in layer_cfg[:-1]: layers.append(", "= expand_dims_layer_1(a) # a shape: [B, ?, 1, d] b", "__future__ import absolute_import from __future__ import division import tensorflow as", "__init__(self, units): \"\"\"Initialize Bahdanau attention layer. 
Args: units: size of", "* polyak_rate + (1-polyak_rate) * v_t Args: source_variables: the moving", "tf.keras.layers.Dense(1) def call(self, features, hidden): # features(CNN_encoder output) shape ==", "a list of integers specifying how many channels are at", "def soft_variables_update(source_variables, target_variables, polyak_rate=1.0): \"\"\"Update the target variables using exponential", "Authors. # # Licensed under the Apache License, Version 2.0", "all gamma_i and beta_i for a single hidden layer. \"\"\"", "= tile_layer((b, [1, 1, variable_length, 1])) # b shape: [B,", "vector to the GRU output, state = self.gru(x) return output,", "tf.keras.layers.Dense(units) self.W2 = tf.keras.layers.Dense(units) self.V = tf.keras.layers.Dense(1) def call(self, features,", "a tensor containing batched sentenced embedding to be transformed n_layer_channel:", "moving average Returns: Operation that does the update \"\"\" updates", "# For not trainable variables do hard updates. return v1.assign(polyak_rate", "b.get_shape() variable_length = shape[1] # variable_len = ? expand_dims_layer_1 =", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "in zip(source_variables, target_variables): v_t.shape.assert_is_compatible_with(v_s.shape) def update_fn(v1, v2): \"\"\"Update variables.\"\"\" #", "License, Version 2.0 (the \"License\"); # you may not use", "def call(self, x, hidden): # x shape after passing through", "# You may obtain a copy of the License at", "the memory state v: projection layer that project score vector", "return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform') def call(self, x, hidden): # x shape", "units: size of the dense layers \"\"\" super(BahdanauAttention, self).__init__() self.W1", "film_params(sentence_embedding, n_layer_channel): \"\"\"Generate FiLM parameters from a sentence embedding. Generate", "tf.keras.Sequential(layers) def stack_dense_layer(layer_cfg): \"\"\"Stack Dense layers. 
Args: layer_cfg: list of", "== (batch_size, 1, embedding_dim) x = self.embedding(x) # passing the", "Returns: the keras model with stacked dense layers \"\"\" layers", "for Tensorflow 2.0. Partially adapted from: https://www.tensorflow.org/tutorials/text/image_captioning \"\"\" # Lint", "a batch dimension exists. Args: sentence_embedding: a tensor containing batched", "Args: units: size of the dense layers \"\"\" super(BahdanauAttention, self).__init__()", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "n_layer_channel], 1) def stack_conv_layer(layer_cfg, padding='same'): \"\"\"Stack convolution layers per layer_cfg.", "= tf.keras.layers.Permute((2, 1, 3))(b) # b shape: [B, ?, ?,", "conv layers use Returns: the keras model with stacked conv", "outer product between a and b.\"\"\" # a shape: [B,", "target_variables): v_t.shape.assert_is_compatible_with(v_s.shape) def update_fn(v1, v2): \"\"\"Update variables.\"\"\" # For not", "axis=1) return context_vector, attention_weights class GRUEnecoder(tf.keras.Model): \"\"\"TF2.0 GRE encoder. Attributes:", "units of the memory state vocab_size: total number of vocabulary", "target variables using exponential moving average. Specifically, v_s' = v_s", "kind of padding the conv layers use Returns: the keras", "= tf.keras.layers.Dense(units) self.V = tf.keras.layers.Dense(1) def call(self, features, hidden): #", "Layer. Attributes: w1: weights that process the feature w2: weights", "tf def film_params(sentence_embedding, n_layer_channel): \"\"\"Generate FiLM parameters from a sentence", "the License for the specific language governing permissions and #", "updates.append(update) return updates def vector_tensor_product(a, b): \"\"\"\"Returns keras layer that", "shape: [B, ?, d] shape_layer = tf.keras.layers.Lambda(tf.shape) shape = shape_layer(b)", "\"\"\"Initialize Bahdanau attention layer. 
Args: units: size of the dense", "= units self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim) self.gru = tf.keras.layers.GRU( self.units,", "recurrent_initializer='glorot_uniform') def call(self, x, hidden): # x shape after passing", "Apache License, Version 2.0 (the \"License\"); # you may not", "\"\"\"Update variables.\"\"\" # For not trainable variables do hard updates.", "project score vector to scalar \"\"\" def __init__(self, units): \"\"\"Initialize", "either express or implied. # See the License for the", "\"\"\" layers = [] for cfg in layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu))", "= [] for cfg in layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return", "attention_weights class GRUEnecoder(tf.keras.Model): \"\"\"TF2.0 GRE encoder. Attributes: embedding: word embedding", "call(self, x, hidden): # x shape after passing through embedding", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "1, d] b = expand_dims_layer_2(b) # a shape: [B, ?,", "v_s * polyak_rate + (1-polyak_rate) * v_t Args: source_variables: the", "permissions and # limitations under the License. \"\"\"Utilities for Tensorflow", "Generate FiLM parameters from a sentence embedding. This method assumes", "the dense layers \"\"\" super(BahdanauAttention, self).__init__() self.W1 = tf.keras.layers.Dense(units) self.W2", "parameters from a sentence embedding. 
This method assumes a batch", "be FiLM'ed Returns: a tuple of tensors the same length", "word emebdding units: number of units of the memory state", "channels are at each hidden layer to be FiLM'ed Returns:", "v: projection layer that project score vector to scalar \"\"\"", "self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim) self.gru = tf.keras.layers.GRU( self.units, return_sequences=True, return_state=True,", "[B, ?, ?, d] b = tf.keras.layers.Permute((2, 1, 3))(b) #", "shape == (batch_size, 64, 1) # you get 1 at", "integer tuples specifying the parameter each layer; each tuple should", "= expand_dims_layer_2(b) # a shape: [B, ?, 1, d] tile_layer", "?, d], b shape: [B, ?, d] shape_layer = tf.keras.layers.Lambda(tf.shape)", "= tf.keras.layers.Lambda( lambda inputs: tf.tile(inputs[0], multiples=inputs[1])) a = tile_layer((a, [1,", "tf.keras.layers.GRU( self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform') def call(self, x, hidden): #", "shape: [B, ?, ?, d] b = tile_layer((b, [1, 1,", "self.gru = tf.keras.layers.GRU( self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform') def call(self, x,", "polyak_rate: rate of moving average Returns: Operation that does the", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "last axis because you are applying score to self.V attention_weights", "moving average. Specifically, v_s' = v_s * polyak_rate + (1-polyak_rate)", "keras model with stacked conv layers \"\"\" layers = []", "sentence embedding. Generate FiLM parameters from a sentence embedding. This", "trainable variables do hard updates. return v1.assign(polyak_rate * v1 +", "== (batch_size, 64, 1) # you get 1 at the", "w2: weights that process the memory state v: projection layer", "disable=invalid-name from __future__ import absolute_import from __future__ import division import", "stack_dense_layer(layer_cfg): \"\"\"Stack Dense layers. 
Args: layer_cfg: list of integer specifying", "?, 1, d] b = expand_dims_layer_2(b) # a shape: [B,", "new observations polyak_rate: rate of moving average Returns: Operation that", "tile_layer((b, [1, 1, variable_length, 1])) # b shape: [B, ?,", "axis=1) # context_vector shape after sum == (batch_size, hidden_size) context_vector", "final_cfg[2], padding=padding)) return tf.keras.Sequential(layers) def stack_dense_layer(layer_cfg): \"\"\"Stack Dense layers. Args:", "hidden_with_time_axis = tf.expand_dims(hidden, 1) # score shape == (batch_size, 64,", "to be FiLM'ed Returns: a tuple of tensors the same", "activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return tf.keras.Sequential(layers) def soft_variables_update(source_variables, target_variables, polyak_rate=1.0): \"\"\"Update the", "For not trainable variables do hard updates. return v1.assign(polyak_rate *", "[B, ?, 1, d] tile_layer = tf.keras.layers.Lambda( lambda inputs: tf.tile(inputs[0],", "process the memory state v: projection layer that project score", "class BahdanauAttention(tf.keras.Model): \"\"\"Bahdanau Attention Layer. Attributes: w1: weights that process", "passing the concatenated vector to the GRU output, state =", "variables target_variables: the new observations polyak_rate: rate of moving average", "\"License\"); # you may not use this file except in", "tf.layers.dense(sentence_embedding, n_total) all_params = tf.keras.layers.Dense( 2 * sum * (n_layer_channel),", "parameters from a sentence embedding. Generate FiLM parameters from a", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "hidden layer to be FiLM'ed Returns: a tuple of tensors", "shape: [B, ?, ?, d] b = tf.keras.layers.Permute((2, 1, 3))(b)", "Args: sentence_embedding: a tensor containing batched sentenced embedding to be", "embedding_dim, units, vocab_size): \"\"\"Initialize the GRU encoder. 
Args: embedding_dim: dimension", "1, shape_numpy[-1])) expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) a = expand_dims_layer_1(a)", "convolution layers per layer_cfg. Args: layer_cfg: list of integer tuples", "# score shape == (batch_size, 64, hidden_size) score = tf.nn.tanh(self.W1(features)", "# distributed under the License is distributed on an \"AS", "?, d] return tf.keras.layers.concatenate([a, b]) # shape: [B, ?, ?,", "shape == (batch_size, 64, hidden_size) score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis))", "# Unless required by applicable law or agreed to in", "import division import tensorflow as tf def film_params(sentence_embedding, n_layer_channel): \"\"\"Generate", "how many channels are at each hidden layer to be", "d] b = expand_dims_layer_2(b) # a shape: [B, ?, 1,", "The Google Research Authors. # # Licensed under the Apache", "# Copyright 2022 The Google Research Authors. # # Licensed", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "FiLM parameters from a sentence embedding. 
Generate FiLM parameters from", "def vector_tensor_product(a, b): \"\"\"\"Returns keras layer that perfrom a outer", "?, d] b = tile_layer((b, [1, 1, variable_length, 1])) #", "of padding the conv layers use Returns: the keras model", "of vocabulary \"\"\" super(GRUEnecoder, self).__init__() self._units = units self.embedding =", "the moving average variables target_variables: the new observations polyak_rate: rate", "v1 + (1 - polyak_rate) * v2) update = update_fn(v_t,", "You may obtain a copy of the License at #", "and b.\"\"\" # a shape: [B, ?, d], b shape:", "each layer Returns: the keras model with stacked dense layers", "context_vector shape after sum == (batch_size, hidden_size) context_vector = attention_weights", "matrix gru: the GRU layer \"\"\" def __init__(self, embedding_dim, units,", "of the memory state vocab_size: total number of vocabulary \"\"\"", "list of integer tuples specifying the parameter each layer; each", "tf.keras.layers.Permute((2, 1, 3))(b) # b shape: [B, ?, ?, d]", "GRU encoder. Args: embedding_dim: dimension of word emebdding units: number", "a outer product between a and b.\"\"\" # a shape:", "the Apache License, Version 2.0 (the \"License\"); # you may", "the keras model with stacked conv layers \"\"\" layers =", "shape after sum == (batch_size, hidden_size) context_vector = attention_weights *", "units, vocab_size): \"\"\"Initialize the GRU encoder. Args: embedding_dim: dimension of", "= tf.keras.layers.GRU( self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform') def call(self, x, hidden):", "the License. \"\"\"Utilities for Tensorflow 2.0. 
Partially adapted from: https://www.tensorflow.org/tutorials/text/image_captioning", "tf.keras.Sequential(layers) def soft_variables_update(source_variables, target_variables, polyak_rate=1.0): \"\"\"Update the target variables using", "sum * (n_layer_channel), activation=tf.nn.relu) return tf.split(all_params, [c * 2 for", "Attributes: w1: weights that process the feature w2: weights that", "embedding_dim: dimension of word emebdding units: number of units of", "encoder. Attributes: embedding: word embedding matrix gru: the GRU layer", "from __future__ import absolute_import from __future__ import division import tensorflow", "= update_fn(v_t, v_s) updates.append(update) return updates def vector_tensor_product(a, b): \"\"\"\"Returns" ]
[ "INFO, DEBUG logger_factory = LoggerFactory(level=logging.INFO) LEVELS = { \"debug\": logging.DEBUG,", "2.0 (the \"License\"); # you may not use this file", "= logging.Formatter( \"%(asctime)s - '\" + str(self.device_key) + \"' -", "# limitations under the License. import logging from typing import", "not None: formatter = logging.Formatter( \"%(asctime)s - '\" + str(self.device_key)", "%(levelname)s [%(filename)s:%(lineno)s\" + \" - %(funcName)s()] - %(message)s\" ) if", "# Logging levels available: NOTSET, INFO, DEBUG logger_factory = LoggerFactory(level=logging.INFO)", "log_file self.loggers: List[logging.Logger] = [] def set_device_key(self, device_key: str) ->", "None: console_handler.setLevel(level) else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler) if self.log_file is not", "None) -> None: \"\"\" Set desired log level and designate", "# type: ignore \"\"\" Create a factory that will give", "console_handler.setLevel(level) else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler) if self.log_file is not None:", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "logger # Logging levels available: NOTSET, INFO, DEBUG logger_factory =", "None \"\"\" self.level = level self.device_key = None self.console =", "log file :type log_file: str or None \"\"\" if log_file", "not None: logger.setLevel(level) else: logger.setLevel(self.level) if self.device_key is not None:", "Override the log level :type level: int or None :returns:", "\"critical\": logging.CRITICAL, \"notset\": logging.NOTSET, } def logging_config(level: str, log_file: Optional[str]", "\"\"\" Return a ready to use logger instance. 
:param name:", "is not None: formatter = logging.Formatter( \"%(asctime)s - '\" +", "logger_factory.level = LEVELS[level] for logger in logger_factory.loggers: logger.setLevel(logger_factory.level) for handler", "\"%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s\" + \" - %(funcName)s()] - %(message)s\"", "use this file except in compliance with the License. #", "\"\"\" self.level = level self.device_key = None self.console = console", "logging_config(level: str, log_file: Optional[str] = None) -> None: \"\"\" Set", "will give loggers through calls to get_logger(). :param level: Set", "None self.console = console self.log_file = log_file self.loggers: List[logging.Logger] =", "log_file: Name of the log file to output to :type", "log_file: str or None \"\"\" if log_file is not None:", "logger \"\"\" logger = logging.getLogger(name) if level is not None:", "Optional class LoggerFactory: \"\"\"Factory for issuing ready to use loggers", ": debug, info, notset :type level: str :param log_file: path", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "# Copyright 2020 WolkAbout Technology s.r.o. # # Licensed under", "if LEVELS[level] == logger_factory.level: return logger_factory.level = LEVELS[level] for logger", "License. # You may obtain a copy of the License", "import List from typing import Optional class LoggerFactory: \"\"\"Factory for", "device_key def get_logger( self, name: str, level: Optional[int] = None", "console self.log_file = log_file self.loggers: List[logging.Logger] = [] def set_device_key(self,", "under the License is distributed on an \"AS IS\" BASIS,", "Set device key. 
:param device_key: Device key :type device_key: str", "= None self.console = console self.log_file = log_file self.loggers: List[logging.Logger]", "= { \"debug\": logging.DEBUG, \"info\": logging.INFO, \"warning\": logging.WARNING, \"error\": logging.ERROR,", "License for the specific language governing permissions and # limitations", "the log file to output to :type log_file: str or", "formatter = logging.Formatter( \"%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s\" + \" -", "other modules.\"\"\" def __init__(self, level=logging.INFO, console=True, log_file=None): # type: ignore", "def get_logger( self, name: str, level: Optional[int] = None )", "-> None: \"\"\" Set desired log level and designate a", "+ \"' - %(levelname)s [%(filename)s:%(lineno)s\" + \" - %(funcName)s()] -", "\"warning\": logging.WARNING, \"error\": logging.ERROR, \"critical\": logging.CRITICAL, \"notset\": logging.NOTSET, } def", "\"info\": logging.INFO, \"warning\": logging.WARNING, \"error\": logging.ERROR, \"critical\": logging.CRITICAL, \"notset\": logging.NOTSET,", "} def logging_config(level: str, log_file: Optional[str] = None) -> None:", "Optional[int] = None ) -> logging.Logger: \"\"\" Return a ready", ":param log_file: path to log file :type log_file: str or", "path to log file :type log_file: str or None \"\"\"", "level is not None: logger.setLevel(level) else: logger.setLevel(self.level) if self.device_key is", "\"\"\" Create a factory that will give loggers through calls", "modules.\"\"\" def __init__(self, level=logging.INFO, console=True, log_file=None): # type: ignore \"\"\"", "console :type console: bool or None :param log_file: Name of", "log_file: Optional[str] = None) -> None: \"\"\" Set desired log", "not None: file_handler = logging.FileHandler(self.log_file) if level is not None:", "logging level :type level: int or None :param console: Should", "logger :type name: str :param level: Override the log level", "name: str, level: Optional[int] = None ) -> 
logging.Logger: \"\"\"", "in compliance with the License. # You may obtain a", "NOTSET, INFO, DEBUG logger_factory = LoggerFactory(level=logging.INFO) LEVELS = { \"debug\":", "level not in LEVELS: print(f\"Invalid level '{level}'\") return if LEVELS[level]", "software # distributed under the License is distributed on an", "logger.setLevel(self.level) if self.device_key is not None: formatter = logging.Formatter( \"%(asctime)s", "self.log_file = log_file self.loggers: List[logging.Logger] = [] def set_device_key(self, device_key:", "logging.CRITICAL, \"notset\": logging.NOTSET, } def logging_config(level: str, log_file: Optional[str] =", "level :type level: int or None :returns: Logger instance :rtype:", "through calls to get_logger(). :param level: Set the desired logging", "= logging.Formatter( \"%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s\" + \" - %(funcName)s()]", "or None \"\"\" self.level = level self.device_key = None self.console", "self.level = level self.device_key = None self.console = console self.log_file", "\"' - %(levelname)s [%(filename)s:%(lineno)s\" + \" - %(funcName)s()] - %(message)s\"", "logging.NOTSET, } def logging_config(level: str, log_file: Optional[str] = None) ->", "Logging levels available: NOTSET, INFO, DEBUG logger_factory = LoggerFactory(level=logging.INFO) LEVELS", "'{level}'\") return if LEVELS[level] == logger_factory.level: return logger_factory.level = LEVELS[level]", "str or None \"\"\" if log_file is not None: logger_factory.log_file", "List[logging.Logger] = [] def set_device_key(self, device_key: str) -> None: \"\"\"", "desired log level and designate a log file. :param level:", "info, notset :type level: str :param log_file: path to log", "self.device_key = device_key def get_logger( self, name: str, level: Optional[int]", "OF ANY KIND, either express or implied. 
# See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "loggers in other modules.\"\"\" def __init__(self, level=logging.INFO, console=True, log_file=None): #", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "to in writing, software # distributed under the License is", "LEVELS[level] for logger in logger_factory.loggers: logger.setLevel(logger_factory.level) for handler in logger.handlers:", "is not None: console_handler.setLevel(level) else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler) if self.log_file", "# See the License for the specific language governing permissions", "file_handler.setLevel(level) else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger) return logger # Logging", "available: NOTSET, INFO, DEBUG logger_factory = LoggerFactory(level=logging.INFO) LEVELS = {", "if self.log_file is not None: file_handler = logging.FileHandler(self.log_file) if level", "else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger) return logger # Logging levels", ":returns: Logger instance :rtype: logger \"\"\" logger = logging.getLogger(name) if", "or agreed to in writing, software # distributed under the", "designate a log file. 
:param level: Available levels : debug,", "required by applicable law or agreed to in writing, software", ":type console: bool or None :param log_file: Name of the", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "= [] def set_device_key(self, device_key: str) -> None: \"\"\" Set", "for issuing ready to use loggers in other modules.\"\"\" def", "with the License. # You may obtain a copy of", "str, level: Optional[int] = None ) -> logging.Logger: \"\"\" Return", "or None \"\"\" if log_file is not None: logger_factory.log_file =", "+ \" - %(funcName)s()] - %(message)s\" ) else: formatter =", "Logger instance :rtype: logger \"\"\" logger = logging.getLogger(name) if level", "key :type device_key: str \"\"\" self.device_key = device_key def get_logger(", "None: logger.setLevel(level) else: logger.setLevel(self.level) if self.device_key is not None: formatter", "compliance with the License. # You may obtain a copy", "int or None :returns: Logger instance :rtype: logger \"\"\" logger", "agreed to in writing, software # distributed under the License", "LoggerFactory: \"\"\"Factory for issuing ready to use loggers in other", "get_logger( self, name: str, level: Optional[int] = None ) ->", "distributed under the License is distributed on an \"AS IS\"", "the License. import logging from typing import List from typing", ":type level: int or None :returns: Logger instance :rtype: logger", "-> None: \"\"\" Set device key. :param device_key: Device key", "logger_factory = LoggerFactory(level=logging.INFO) LEVELS = { \"debug\": logging.DEBUG, \"info\": logging.INFO,", "def __init__(self, level=logging.INFO, console=True, log_file=None): # type: ignore \"\"\" Create", "a factory that will give loggers through calls to get_logger().", "file. :param level: Available levels : debug, info, notset :type", ") if self.console: console_handler = logging.StreamHandler() if level is not", "express or implied. 
# See the License for the specific", "except in compliance with the License. # You may obtain", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "formatter = logging.Formatter( \"%(asctime)s - '\" + str(self.device_key) + \"'", "not use this file except in compliance with the License.", "writing, software # distributed under the License is distributed on", "not None: logger_factory.log_file = log_file if level not in LEVELS:", "None :param console: Should the log messages be outputted to", "if level is not None: file_handler.setLevel(level) else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler)", "you may not use this file except in compliance with", "get_logger(). :param level: Set the desired logging level :type level:", "level :type level: int or None :param console: Should the", "\"%(asctime)s - '\" + str(self.device_key) + \"' - %(levelname)s [%(filename)s:%(lineno)s\"", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "level: str :param log_file: path to log file :type log_file:", ":type log_file: str or None \"\"\" self.level = level self.device_key", "if level is not None: console_handler.setLevel(level) else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler)", "self.loggers.append(logger) return logger # Logging levels available: NOTSET, INFO, DEBUG", "\"\"\" self.device_key = device_key def get_logger( self, name: str, level:", "= logging.getLogger(name) if level is not None: logger.setLevel(level) else: logger.setLevel(self.level)", "__init__(self, level=logging.INFO, console=True, log_file=None): # type: ignore \"\"\" Create a", "calls to get_logger(). :param level: Set the desired logging level", "is not None: file_handler = logging.FileHandler(self.log_file) if level is not", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "None: file_handler = logging.FileHandler(self.log_file) if level is not None: file_handler.setLevel(level)", "logging.FileHandler(self.log_file) if level is not None: file_handler.setLevel(level) else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter)", "Copyright 2020 WolkAbout Technology s.r.o. # # Licensed under the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "\"debug\": logging.DEBUG, \"info\": logging.INFO, \"warning\": logging.WARNING, \"error\": logging.ERROR, \"critical\": logging.CRITICAL,", "WolkAbout Technology s.r.o. # # Licensed under the Apache License,", "is not None: file_handler.setLevel(level) else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger) return", "logging.DEBUG, \"info\": logging.INFO, \"warning\": logging.WARNING, \"error\": logging.ERROR, \"critical\": logging.CRITICAL, \"notset\":", "or None :returns: Logger instance :rtype: logger \"\"\" logger =", "class LoggerFactory: \"\"\"Factory for issuing ready to use loggers in", "str :param log_file: path to log file :type log_file: str", "log_file=None): # type: ignore \"\"\" Create a factory that will", "typing import Optional class LoggerFactory: \"\"\"Factory for issuing ready to", "outputted to the console :type console: bool or None :param", "file_handler = logging.FileHandler(self.log_file) if level is not None: file_handler.setLevel(level) else:", "Name of the logger :type name: str :param level: Override", "log_file if level not in LEVELS: print(f\"Invalid level '{level}'\") return", "\"\"\"LoggerFactory Module.\"\"\" # Copyright 2020 WolkAbout Technology s.r.o. 
# #", ":param console: Should the log messages be outputted to the", "Available levels : debug, info, notset :type level: str :param", "LoggerFactory(level=logging.INFO) LEVELS = { \"debug\": logging.DEBUG, \"info\": logging.INFO, \"warning\": logging.WARNING,", "OR CONDITIONS OF ANY KIND, either express or implied. #", "logger_factory.level: return logger_factory.level = LEVELS[level] for logger in logger_factory.loggers: logger.setLevel(logger_factory.level)", "int or None :param console: Should the log messages be", "{ \"debug\": logging.DEBUG, \"info\": logging.INFO, \"warning\": logging.WARNING, \"error\": logging.ERROR, \"critical\":", "the License is distributed on an \"AS IS\" BASIS, #", "level and designate a log file. :param level: Available levels", "= log_file if level not in LEVELS: print(f\"Invalid level '{level}'\")", "the desired logging level :type level: int or None :param", "= console self.log_file = log_file self.loggers: List[logging.Logger] = [] def", "if self.device_key is not None: formatter = logging.Formatter( \"%(asctime)s -", "console: bool or None :param log_file: Name of the log", "= logging.StreamHandler() if level is not None: console_handler.setLevel(level) else: console_handler.setLevel(self.level)", "governing permissions and # limitations under the License. import logging", "- %(message)s\" ) if self.console: console_handler = logging.StreamHandler() if level", "return logger_factory.level = LEVELS[level] for logger in logger_factory.loggers: logger.setLevel(logger_factory.level) for", "Technology s.r.o. # # Licensed under the Apache License, Version", "output to :type log_file: str or None \"\"\" self.level =", "self.device_key = None self.console = console self.log_file = log_file self.loggers:", "set_device_key(self, device_key: str) -> None: \"\"\" Set device key. 
:param", "law or agreed to in writing, software # distributed under", "LEVELS[level] == logger_factory.level: return logger_factory.level = LEVELS[level] for logger in", "level=logging.INFO, console=True, log_file=None): # type: ignore \"\"\" Create a factory", ":param device_key: Device key :type device_key: str \"\"\" self.device_key =", "log_file: str or None \"\"\" self.level = level self.device_key =", "of the log file to output to :type log_file: str", "%(funcName)s()] - %(message)s\" ) else: formatter = logging.Formatter( \"%(asctime)s -", "if level not in LEVELS: print(f\"Invalid level '{level}'\") return if", "instance. :param name: Name of the logger :type name: str", "logging.ERROR, \"critical\": logging.CRITICAL, \"notset\": logging.NOTSET, } def logging_config(level: str, log_file:", "\" - %(funcName)s()] - %(message)s\" ) if self.console: console_handler =", "\" - %(funcName)s()] - %(message)s\" ) else: formatter = logging.Formatter(", "may obtain a copy of the License at # #", "- '\" + str(self.device_key) + \"' - %(levelname)s [%(filename)s:%(lineno)s\" +", "\"notset\": logging.NOTSET, } def logging_config(level: str, log_file: Optional[str] = None)", "= LEVELS[level] for logger in logger_factory.loggers: logger.setLevel(logger_factory.level) for handler in", "level self.device_key = None self.console = console self.log_file = log_file", "logger.addHandler(file_handler) self.loggers.append(logger) return logger # Logging levels available: NOTSET, INFO,", "logger = logging.getLogger(name) if level is not None: logger.setLevel(level) else:", "for logger in logger_factory.loggers: logger.setLevel(logger_factory.level) for handler in logger.handlers: handler.setLevel(logger_factory.level)", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "None: file_handler.setLevel(level) else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger) return logger #", ":type 
log_file: str or None \"\"\" if log_file is not", "may not use this file except in compliance with the", "if log_file is not None: logger_factory.log_file = log_file if level", "level: Available levels : debug, info, notset :type level: str", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "this file except in compliance with the License. # You", "messages be outputted to the console :type console: bool or", "or None :param console: Should the log messages be outputted", "and designate a log file. :param level: Available levels :", ":type level: int or None :param console: Should the log", "ready to use loggers in other modules.\"\"\" def __init__(self, level=logging.INFO,", "str, log_file: Optional[str] = None) -> None: \"\"\" Set desired", "device key. :param device_key: Device key :type device_key: str \"\"\"", "limitations under the License. import logging from typing import List", "[%(filename)s:%(lineno)s\" + \" - %(funcName)s()] - %(message)s\" ) if self.console:", "import Optional class LoggerFactory: \"\"\"Factory for issuing ready to use", "logging.Formatter( \"%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s\" + \" - %(funcName)s()] -", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "Create a factory that will give loggers through calls to", "# # Licensed under the Apache License, Version 2.0 (the", "None: \"\"\" Set device key. :param device_key: Device key :type", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "to :type log_file: str or None \"\"\" self.level = level", "log level and designate a log file. :param level: Available", "ready to use logger instance. 
:param name: Name of the", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "not None: file_handler.setLevel(level) else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger) return logger", "- %(message)s\" ) else: formatter = logging.Formatter( \"%(asctime)s - %(levelname)s", "a log file. :param level: Available levels : debug, info,", "not None: console_handler.setLevel(level) else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler) if self.log_file is", "logger_factory.log_file = log_file if level not in LEVELS: print(f\"Invalid level", "self.console: console_handler = logging.StreamHandler() if level is not None: console_handler.setLevel(level)", "\"\"\" if log_file is not None: logger_factory.log_file = log_file if", "of the logger :type name: str :param level: Override the", "None :returns: Logger instance :rtype: logger \"\"\" logger = logging.getLogger(name)", "to log file :type log_file: str or None \"\"\" if", "language governing permissions and # limitations under the License. import", "= logging.FileHandler(self.log_file) if level is not None: file_handler.setLevel(level) else: file_handler.setLevel(self.level)", "else: logger.setLevel(self.level) if self.device_key is not None: formatter = logging.Formatter(", "that will give loggers through calls to get_logger(). 
:param level:", "be outputted to the console :type console: bool or None", "def logging_config(level: str, log_file: Optional[str] = None) -> None: \"\"\"", "if level is not None: logger.setLevel(level) else: logger.setLevel(self.level) if self.device_key", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "use loggers in other modules.\"\"\" def __init__(self, level=logging.INFO, console=True, log_file=None):", "- %(funcName)s()] - %(message)s\" ) if self.console: console_handler = logging.StreamHandler()", "logging.INFO, \"warning\": logging.WARNING, \"error\": logging.ERROR, \"critical\": logging.CRITICAL, \"notset\": logging.NOTSET, }", "give loggers through calls to get_logger(). :param level: Set the", "Should the log messages be outputted to the console :type", "level: Override the log level :type level: int or None", "or implied. # See the License for the specific language", "Optional[str] = None) -> None: \"\"\" Set desired log level", "the log messages be outputted to the console :type console:", "\"\"\" logger = logging.getLogger(name) if level is not None: logger.setLevel(level)", "self.device_key is not None: formatter = logging.Formatter( \"%(asctime)s - '\"", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", ":param level: Set the desired logging level :type level: int", "key. 
:param device_key: Device key :type device_key: str \"\"\" self.device_key", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "LEVELS: print(f\"Invalid level '{level}'\") return if LEVELS[level] == logger_factory.level: return", "import logging from typing import List from typing import Optional", "ignore \"\"\" Create a factory that will give loggers through", "= LoggerFactory(level=logging.INFO) LEVELS = { \"debug\": logging.DEBUG, \"info\": logging.INFO, \"warning\":", "name: Name of the logger :type name: str :param level:", "log file to output to :type log_file: str or None", "str or None \"\"\" self.level = level self.device_key = None", "self.log_file is not None: file_handler = logging.FileHandler(self.log_file) if level is", ":param level: Override the log level :type level: int or", "(the \"License\"); # you may not use this file except", "None: logger_factory.log_file = log_file if level not in LEVELS: print(f\"Invalid", "# you may not use this file except in compliance", "logging.Formatter( \"%(asctime)s - '\" + str(self.device_key) + \"' - %(levelname)s", "console_handler = logging.StreamHandler() if level is not None: console_handler.setLevel(level) else:", ":rtype: logger \"\"\" logger = logging.getLogger(name) if level is not", "License. 
import logging from typing import List from typing import", "None: formatter = logging.Formatter( \"%(asctime)s - '\" + str(self.device_key) +", "= level self.device_key = None self.console = console self.log_file =", "-> logging.Logger: \"\"\" Return a ready to use logger instance.", "level is not None: console_handler.setLevel(level) else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler) if", "# # Unless required by applicable law or agreed to", "= None) -> None: \"\"\" Set desired log level and", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "- %(funcName)s()] - %(message)s\" ) else: formatter = logging.Formatter( \"%(asctime)s", "else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler) if self.log_file is not None: file_handler", "str(self.device_key) + \"' - %(levelname)s [%(filename)s:%(lineno)s\" + \" - %(funcName)s()]", "under the License. import logging from typing import List from", "= device_key def get_logger( self, name: str, level: Optional[int] =", "console_handler.setFormatter(formatter) logger.addHandler(console_handler) if self.log_file is not None: file_handler = logging.FileHandler(self.log_file)", "Version 2.0 (the \"License\"); # you may not use this", "%(funcName)s()] - %(message)s\" ) if self.console: console_handler = logging.StreamHandler() if", "debug, info, notset :type level: str :param log_file: path to", "logging.getLogger(name) if level is not None: logger.setLevel(level) else: logger.setLevel(self.level) if", ") else: formatter = logging.Formatter( \"%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s\" +", "a ready to use logger instance. :param name: Name of", "type: ignore \"\"\" Create a factory that will give loggers", "implied. 
# See the License for the specific language governing", "[] def set_device_key(self, device_key: str) -> None: \"\"\" Set device", "under the Apache License, Version 2.0 (the \"License\"); # you", "name: str :param level: Override the log level :type level:", "console=True, log_file=None): # type: ignore \"\"\" Create a factory that", "'\" + str(self.device_key) + \"' - %(levelname)s [%(filename)s:%(lineno)s\" + \"", "and # limitations under the License. import logging from typing", "levels : debug, info, notset :type level: str :param log_file:", "LEVELS = { \"debug\": logging.DEBUG, \"info\": logging.INFO, \"warning\": logging.WARNING, \"error\":", "by applicable law or agreed to in writing, software #", "to use loggers in other modules.\"\"\" def __init__(self, level=logging.INFO, console=True,", "bool or None :param log_file: Name of the log file", "level: Optional[int] = None ) -> logging.Logger: \"\"\" Return a", "desired logging level :type level: int or None :param console:", "None: \"\"\" Set desired log level and designate a log", "else: formatter = logging.Formatter( \"%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s\" + \"", "DEBUG logger_factory = LoggerFactory(level=logging.INFO) LEVELS = { \"debug\": logging.DEBUG, \"info\":", "device_key: str \"\"\" self.device_key = device_key def get_logger( self, name:", "Module.\"\"\" # Copyright 2020 WolkAbout Technology s.r.o. 
# # Licensed", "str \"\"\" self.device_key = device_key def get_logger( self, name: str,", "\"\"\" Set desired log level and designate a log file.", "return if LEVELS[level] == logger_factory.level: return logger_factory.level = LEVELS[level] for", "is not None: logger_factory.log_file = log_file if level not in", "is not None: logger.setLevel(level) else: logger.setLevel(self.level) if self.device_key is not", "the console :type console: bool or None :param log_file: Name", "%(message)s\" ) if self.console: console_handler = logging.StreamHandler() if level is", "self, name: str, level: Optional[int] = None ) -> logging.Logger:", "<reponame>Wolkabout/WolkConnect-Python-<gh_stars>1-10 \"\"\"LoggerFactory Module.\"\"\" # Copyright 2020 WolkAbout Technology s.r.o. #", "level: Set the desired logging level :type level: int or", "level '{level}'\") return if LEVELS[level] == logger_factory.level: return logger_factory.level =", ":param name: Name of the logger :type name: str :param", "+ str(self.device_key) + \"' - %(levelname)s [%(filename)s:%(lineno)s\" + \" -", "levels available: NOTSET, INFO, DEBUG logger_factory = LoggerFactory(level=logging.INFO) LEVELS =", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Unless required by applicable law or agreed to in writing,", "device_key: str) -> None: \"\"\" Set device key. :param device_key:", "self.loggers: List[logging.Logger] = [] def set_device_key(self, device_key: str) -> None:", "str) -> None: \"\"\" Set device key. :param device_key: Device", "file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger) return logger # Logging levels available: NOTSET,", "the specific language governing permissions and # limitations under the", "instance :rtype: logger \"\"\" logger = logging.getLogger(name) if level is", "return logger # Logging levels available: NOTSET, INFO, DEBUG logger_factory", "Return a ready to use logger instance. 
:param name: Name", "applicable law or agreed to in writing, software # distributed", "the log level :type level: int or None :returns: Logger", ":type device_key: str \"\"\" self.device_key = device_key def get_logger( self,", "from typing import Optional class LoggerFactory: \"\"\"Factory for issuing ready", "logging.Logger: \"\"\" Return a ready to use logger instance. :param", "Set the desired logging level :type level: int or None", "log_file: path to log file :type log_file: str or None", "to get_logger(). :param level: Set the desired logging level :type", "file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger) return logger # Logging levels available:", "in writing, software # distributed under the License is distributed", "permissions and # limitations under the License. import logging from", "Device key :type device_key: str \"\"\" self.device_key = device_key def", "level: int or None :param console: Should the log messages", "logger.addHandler(console_handler) if self.log_file is not None: file_handler = logging.FileHandler(self.log_file) if", "logger instance. :param name: Name of the logger :type name:", "= None ) -> logging.Logger: \"\"\" Return a ready to", "typing import List from typing import Optional class LoggerFactory: \"\"\"Factory", "[%(filename)s:%(lineno)s\" + \" - %(funcName)s()] - %(message)s\" ) else: formatter", "logging.WARNING, \"error\": logging.ERROR, \"critical\": logging.CRITICAL, \"notset\": logging.NOTSET, } def logging_config(level:", "level is not None: file_handler.setLevel(level) else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger)", "None ) -> logging.Logger: \"\"\" Return a ready to use", "factory that will give loggers through calls to get_logger(). 
:param", "logging from typing import List from typing import Optional class", "in other modules.\"\"\" def __init__(self, level=logging.INFO, console=True, log_file=None): # type:", "to use logger instance. :param name: Name of the logger", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "def set_device_key(self, device_key: str) -> None: \"\"\" Set device key.", "License, Version 2.0 (the \"License\"); # you may not use", "# You may obtain a copy of the License at", "s.r.o. # # Licensed under the Apache License, Version 2.0", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "%(message)s\" ) else: formatter = logging.Formatter( \"%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s\"", ":type level: str :param log_file: path to log file :type", "log file. :param level: Available levels : debug, info, notset", ":type name: str :param level: Override the log level :type", "the License for the specific language governing permissions and #", "List from typing import Optional class LoggerFactory: \"\"\"Factory for issuing", "file to output to :type log_file: str or None \"\"\"", "if self.console: console_handler = logging.StreamHandler() if level is not None:", "Apache License, Version 2.0 (the \"License\"); # you may not", "the logger :type name: str :param level: Override the log", "log level :type level: int or None :returns: Logger instance", "either express or implied. 
# See the License for the", "in LEVELS: print(f\"Invalid level '{level}'\") return if LEVELS[level] == logger_factory.level:", "%(levelname)s [%(filename)s:%(lineno)s\" + \" - %(funcName)s()] - %(message)s\" ) else:", ") -> logging.Logger: \"\"\" Return a ready to use logger", "logger.setLevel(level) else: logger.setLevel(self.level) if self.device_key is not None: formatter =", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "log messages be outputted to the console :type console: bool", "print(f\"Invalid level '{level}'\") return if LEVELS[level] == logger_factory.level: return logger_factory.level", "Name of the log file to output to :type log_file:", "+ \" - %(funcName)s()] - %(message)s\" ) if self.console: console_handler", "from typing import List from typing import Optional class LoggerFactory:", "== logger_factory.level: return logger_factory.level = LEVELS[level] for logger in logger_factory.loggers:", "str :param level: Override the log level :type level: int", "self.console = console self.log_file = log_file self.loggers: List[logging.Logger] = []", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "to the console :type console: bool or None :param log_file:", "\"error\": logging.ERROR, \"critical\": logging.CRITICAL, \"notset\": logging.NOTSET, } def logging_config(level: str,", "console: Should the log messages be outputted to the console", "- %(levelname)s [%(filename)s:%(lineno)s\" + \" - %(funcName)s()] - %(message)s\" )", "log_file is not None: logger_factory.log_file = log_file if level not", "None :param log_file: Name of the log file to output", "\"\"\"Factory for issuing ready to use loggers in other modules.\"\"\"", "logging.StreamHandler() if level is not None: console_handler.setLevel(level) else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter)", "None \"\"\" if log_file is not None: logger_factory.log_file = log_file", "2020 WolkAbout 
Technology s.r.o. # # Licensed under the Apache", "or None :param log_file: Name of the log file to", "\"License\"); # you may not use this file except in", "file :type log_file: str or None \"\"\" if log_file is", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "level: int or None :returns: Logger instance :rtype: logger \"\"\"", "# distributed under the License is distributed on an \"AS", "= log_file self.loggers: List[logging.Logger] = [] def set_device_key(self, device_key: str)", "# Unless required by applicable law or agreed to in", "not in LEVELS: print(f\"Invalid level '{level}'\") return if LEVELS[level] ==", "to output to :type log_file: str or None \"\"\" self.level", "issuing ready to use loggers in other modules.\"\"\" def __init__(self,", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "You may obtain a copy of the License at #", "use logger instance. :param name: Name of the logger :type", "Set desired log level and designate a log file. :param", "loggers through calls to get_logger(). :param level: Set the desired", ":param level: Available levels : debug, info, notset :type level:", "the Apache License, Version 2.0 (the \"License\"); # you may", "device_key: Device key :type device_key: str \"\"\" self.device_key = device_key", "\"\"\" Set device key. :param device_key: Device key :type device_key:", ":param log_file: Name of the log file to output to", "notset :type level: str :param log_file: path to log file", "console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler) if self.log_file is not None: file_handler =" ]
[ "import datetime import pendulum import boto3 from botocore.exceptions import ClientError", "gzip.compress(json.dumps(data).encode('utf-8')) headers = {'content-type': 'application/json', 'content-encoding': 'gzip'} url = base_url", "ClientError as exc: raise exc else: text = \"I can't", "lambda_handler(event: dict, context: dict) -> dict: log = Log.setup(name='logger') aws_settings", "= datetime.now(tz=timezone).strftime('%Y-%m-%d') timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S') try: token = telegram_settings.access_token base_url", "+ \"/sendMessage\" requests.post(url=url, data=data, headers=headers) except Exception as exc: log.error(msg=exc)", "sorry mate!\" data = {\"text\": text, \"chat_id\": chat_id} data =", "\"/sendMessage\" requests.post(url=url, data=data, headers=headers) except Exception as exc: log.error(msg=exc) finally:", "= json.loads(event[\"body\"]) chat_id = data[\"message\"][\"chat\"][\"id\"] if chat_id == telegram_settings.chat_id: client", "telegram_settings.chat_id: client = boto3.client('s3') bucket = aws_settings.raw_bucket root_path = aws_settings.root_path", "boto3 from botocore.exceptions import ClientError from util.log import Log from", "aws_settings = AWSSettings() telegram_settings = TelegramSettings() timezone = pendulum.timezone('America/Sao_Paulo') date", "bucket, f\"{date}/{timestamp}.json\") except ClientError as exc: raise exc else: text", "mate!\" data = {\"text\": text, \"chat_id\": chat_id} data = gzip.compress(json.dumps(data).encode('utf-8'))", "import gzip import requests from datetime import datetime import pendulum", "settings.telegram_settings import TelegramSettings def lambda_handler(event: dict, context: dict) -> dict:", "import json import gzip import requests from datetime import datetime", "dict, context: dict) -> dict: log = Log.setup(name='logger') aws_settings =", "root_path = aws_settings.root_path try: with open(f\"{root_path}/{timestamp}.json\", mode='w', encoding='utf8') as fp:", 
"import TelegramSettings def lambda_handler(event: dict, context: dict) -> dict: log", "= gzip.compress(json.dumps(data).encode('utf-8')) headers = {'content-type': 'application/json', 'content-encoding': 'gzip'} url =", "util.log import Log from settings.aws_settings import AWSSettings from settings.telegram_settings import", "botocore.exceptions import ClientError from util.log import Log from settings.aws_settings import", "AWSSettings() telegram_settings = TelegramSettings() timezone = pendulum.timezone('America/Sao_Paulo') date = datetime.now(tz=timezone).strftime('%Y-%m-%d')", "exc: raise exc else: text = \"I can't talk to", "ClientError from util.log import Log from settings.aws_settings import AWSSettings from", "= telegram_settings.access_token base_url = f\"https://api.telegram.org/bot{token}\" data = json.loads(event[\"body\"]) chat_id =", "chat_id} data = gzip.compress(json.dumps(data).encode('utf-8')) headers = {'content-type': 'application/json', 'content-encoding': 'gzip'}", "dict: log = Log.setup(name='logger') aws_settings = AWSSettings() telegram_settings = TelegramSettings()", "def lambda_handler(event: dict, context: dict) -> dict: log = Log.setup(name='logger')", "requests from datetime import datetime import pendulum import boto3 from", "dict) -> dict: log = Log.setup(name='logger') aws_settings = AWSSettings() telegram_settings", "requests.post(url=url, data=data, headers=headers) except Exception as exc: log.error(msg=exc) finally: return", "else: text = \"I can't talk to strangers, sorry mate!\"", "encoding='utf8') as fp: json.dump(data, fp) client.upload_file(f\"{root_path}/{timestamp}.json\", bucket, f\"{date}/{timestamp}.json\") except ClientError", "pendulum import boto3 from botocore.exceptions import ClientError from util.log import", "timezone = pendulum.timezone('America/Sao_Paulo') date = datetime.now(tz=timezone).strftime('%Y-%m-%d') timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S') try:", "from util.log import Log 
from settings.aws_settings import AWSSettings from settings.telegram_settings", "= aws_settings.raw_bucket root_path = aws_settings.root_path try: with open(f\"{root_path}/{timestamp}.json\", mode='w', encoding='utf8')", "data[\"message\"][\"chat\"][\"id\"] if chat_id == telegram_settings.chat_id: client = boto3.client('s3') bucket =", "TelegramSettings def lambda_handler(event: dict, context: dict) -> dict: log =", "import requests from datetime import datetime import pendulum import boto3", "headers = {'content-type': 'application/json', 'content-encoding': 'gzip'} url = base_url +", "try: token = telegram_settings.access_token base_url = f\"https://api.telegram.org/bot{token}\" data = json.loads(event[\"body\"])", "gzip import requests from datetime import datetime import pendulum import", "fp: json.dump(data, fp) client.upload_file(f\"{root_path}/{timestamp}.json\", bucket, f\"{date}/{timestamp}.json\") except ClientError as exc:", "bucket = aws_settings.raw_bucket root_path = aws_settings.root_path try: with open(f\"{root_path}/{timestamp}.json\", mode='w',", "as fp: json.dump(data, fp) client.upload_file(f\"{root_path}/{timestamp}.json\", bucket, f\"{date}/{timestamp}.json\") except ClientError as", "= base_url + \"/sendMessage\" requests.post(url=url, data=data, headers=headers) except Exception as", "= aws_settings.root_path try: with open(f\"{root_path}/{timestamp}.json\", mode='w', encoding='utf8') as fp: json.dump(data,", "settings.aws_settings import AWSSettings from settings.telegram_settings import TelegramSettings def lambda_handler(event: dict,", "if chat_id == telegram_settings.chat_id: client = boto3.client('s3') bucket = aws_settings.raw_bucket", "AWSSettings from settings.telegram_settings import TelegramSettings def lambda_handler(event: dict, context: dict)", "import pendulum import boto3 from botocore.exceptions import ClientError from util.log", "aws_settings.raw_bucket root_path = aws_settings.root_path try: with 
open(f\"{root_path}/{timestamp}.json\", mode='w', encoding='utf8') as", "exc else: text = \"I can't talk to strangers, sorry", "datetime.now(tz=timezone).strftime('%Y-%m-%d') timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S') try: token = telegram_settings.access_token base_url =", "-> dict: log = Log.setup(name='logger') aws_settings = AWSSettings() telegram_settings =", "as exc: raise exc else: text = \"I can't talk", "'application/json', 'content-encoding': 'gzip'} url = base_url + \"/sendMessage\" requests.post(url=url, data=data,", "telegram_settings = TelegramSettings() timezone = pendulum.timezone('America/Sao_Paulo') date = datetime.now(tz=timezone).strftime('%Y-%m-%d') timestamp", "can't talk to strangers, sorry mate!\" data = {\"text\": text,", "except ClientError as exc: raise exc else: text = \"I", "= {\"text\": text, \"chat_id\": chat_id} data = gzip.compress(json.dumps(data).encode('utf-8')) headers =", "datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S') try: token = telegram_settings.access_token base_url = f\"https://api.telegram.org/bot{token}\" data =", "boto3.client('s3') bucket = aws_settings.raw_bucket root_path = aws_settings.root_path try: with open(f\"{root_path}/{timestamp}.json\",", "= TelegramSettings() timezone = pendulum.timezone('America/Sao_Paulo') date = datetime.now(tz=timezone).strftime('%Y-%m-%d') timestamp =", "data = gzip.compress(json.dumps(data).encode('utf-8')) headers = {'content-type': 'application/json', 'content-encoding': 'gzip'} url", "log = Log.setup(name='logger') aws_settings = AWSSettings() telegram_settings = TelegramSettings() timezone", "{'content-type': 'application/json', 'content-encoding': 'gzip'} url = base_url + \"/sendMessage\" requests.post(url=url,", "json import gzip import requests from datetime import datetime import", "datetime import pendulum import boto3 from botocore.exceptions import ClientError from", "from botocore.exceptions import ClientError from util.log import Log from 
settings.aws_settings", "'gzip'} url = base_url + \"/sendMessage\" requests.post(url=url, data=data, headers=headers) except", "client = boto3.client('s3') bucket = aws_settings.raw_bucket root_path = aws_settings.root_path try:", "f\"{date}/{timestamp}.json\") except ClientError as exc: raise exc else: text =", "import ClientError from util.log import Log from settings.aws_settings import AWSSettings", "json.dump(data, fp) client.upload_file(f\"{root_path}/{timestamp}.json\", bucket, f\"{date}/{timestamp}.json\") except ClientError as exc: raise", "{\"text\": text, \"chat_id\": chat_id} data = gzip.compress(json.dumps(data).encode('utf-8')) headers = {'content-type':", "= Log.setup(name='logger') aws_settings = AWSSettings() telegram_settings = TelegramSettings() timezone =", "import boto3 from botocore.exceptions import ClientError from util.log import Log", "pendulum.timezone('America/Sao_Paulo') date = datetime.now(tz=timezone).strftime('%Y-%m-%d') timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S') try: token =", "== telegram_settings.chat_id: client = boto3.client('s3') bucket = aws_settings.raw_bucket root_path =", "chat_id = data[\"message\"][\"chat\"][\"id\"] if chat_id == telegram_settings.chat_id: client = boto3.client('s3')", "import AWSSettings from settings.telegram_settings import TelegramSettings def lambda_handler(event: dict, context:", "f\"https://api.telegram.org/bot{token}\" data = json.loads(event[\"body\"]) chat_id = data[\"message\"][\"chat\"][\"id\"] if chat_id ==", "import Log from settings.aws_settings import AWSSettings from settings.telegram_settings import TelegramSettings", "context: dict) -> dict: log = Log.setup(name='logger') aws_settings = AWSSettings()", "Log.setup(name='logger') aws_settings = AWSSettings() telegram_settings = TelegramSettings() timezone = pendulum.timezone('America/Sao_Paulo')", "to strangers, sorry mate!\" data = {\"text\": text, \"chat_id\": chat_id}", "from settings.aws_settings import AWSSettings 
from settings.telegram_settings import TelegramSettings def lambda_handler(event:", "date = datetime.now(tz=timezone).strftime('%Y-%m-%d') timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S') try: token = telegram_settings.access_token", "text, \"chat_id\": chat_id} data = gzip.compress(json.dumps(data).encode('utf-8')) headers = {'content-type': 'application/json',", "TelegramSettings() timezone = pendulum.timezone('America/Sao_Paulo') date = datetime.now(tz=timezone).strftime('%Y-%m-%d') timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S')", "timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S') try: token = telegram_settings.access_token base_url = f\"https://api.telegram.org/bot{token}\"", "base_url = f\"https://api.telegram.org/bot{token}\" data = json.loads(event[\"body\"]) chat_id = data[\"message\"][\"chat\"][\"id\"] if", "try: with open(f\"{root_path}/{timestamp}.json\", mode='w', encoding='utf8') as fp: json.dump(data, fp) client.upload_file(f\"{root_path}/{timestamp}.json\",", "token = telegram_settings.access_token base_url = f\"https://api.telegram.org/bot{token}\" data = json.loads(event[\"body\"]) chat_id", "text = \"I can't talk to strangers, sorry mate!\" data", "\"I can't talk to strangers, sorry mate!\" data = {\"text\":", "'content-encoding': 'gzip'} url = base_url + \"/sendMessage\" requests.post(url=url, data=data, headers=headers)", "= boto3.client('s3') bucket = aws_settings.raw_bucket root_path = aws_settings.root_path try: with", "talk to strangers, sorry mate!\" data = {\"text\": text, \"chat_id\":", "= AWSSettings() telegram_settings = TelegramSettings() timezone = pendulum.timezone('America/Sao_Paulo') date =", "\"chat_id\": chat_id} data = gzip.compress(json.dumps(data).encode('utf-8')) headers = {'content-type': 'application/json', 'content-encoding':", "from datetime import datetime import pendulum import boto3 from botocore.exceptions", "open(f\"{root_path}/{timestamp}.json\", mode='w', encoding='utf8') 
as fp: json.dump(data, fp) client.upload_file(f\"{root_path}/{timestamp}.json\", bucket, f\"{date}/{timestamp}.json\")", "with open(f\"{root_path}/{timestamp}.json\", mode='w', encoding='utf8') as fp: json.dump(data, fp) client.upload_file(f\"{root_path}/{timestamp}.json\", bucket,", "data = {\"text\": text, \"chat_id\": chat_id} data = gzip.compress(json.dumps(data).encode('utf-8')) headers", "mode='w', encoding='utf8') as fp: json.dump(data, fp) client.upload_file(f\"{root_path}/{timestamp}.json\", bucket, f\"{date}/{timestamp}.json\") except", "= \"I can't talk to strangers, sorry mate!\" data =", "= pendulum.timezone('America/Sao_Paulo') date = datetime.now(tz=timezone).strftime('%Y-%m-%d') timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S') try: token", "aws_settings.root_path try: with open(f\"{root_path}/{timestamp}.json\", mode='w', encoding='utf8') as fp: json.dump(data, fp)", "Log from settings.aws_settings import AWSSettings from settings.telegram_settings import TelegramSettings def", "= data[\"message\"][\"chat\"][\"id\"] if chat_id == telegram_settings.chat_id: client = boto3.client('s3') bucket", "data = json.loads(event[\"body\"]) chat_id = data[\"message\"][\"chat\"][\"id\"] if chat_id == telegram_settings.chat_id:", "= f\"https://api.telegram.org/bot{token}\" data = json.loads(event[\"body\"]) chat_id = data[\"message\"][\"chat\"][\"id\"] if chat_id", "datetime import datetime import pendulum import boto3 from botocore.exceptions import", "fp) client.upload_file(f\"{root_path}/{timestamp}.json\", bucket, f\"{date}/{timestamp}.json\") except ClientError as exc: raise exc", "url = base_url + \"/sendMessage\" requests.post(url=url, data=data, headers=headers) except Exception", "base_url + \"/sendMessage\" requests.post(url=url, data=data, headers=headers) except Exception as exc:", "telegram_settings.access_token base_url = f\"https://api.telegram.org/bot{token}\" data = json.loads(event[\"body\"]) chat_id = 
data[\"message\"][\"chat\"][\"id\"]", "json.loads(event[\"body\"]) chat_id = data[\"message\"][\"chat\"][\"id\"] if chat_id == telegram_settings.chat_id: client =", "client.upload_file(f\"{root_path}/{timestamp}.json\", bucket, f\"{date}/{timestamp}.json\") except ClientError as exc: raise exc else:", "raise exc else: text = \"I can't talk to strangers,", "strangers, sorry mate!\" data = {\"text\": text, \"chat_id\": chat_id} data", "from settings.telegram_settings import TelegramSettings def lambda_handler(event: dict, context: dict) ->", "= datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S') try: token = telegram_settings.access_token base_url = f\"https://api.telegram.org/bot{token}\" data", "= {'content-type': 'application/json', 'content-encoding': 'gzip'} url = base_url + \"/sendMessage\"", "data=data, headers=headers) except Exception as exc: log.error(msg=exc) finally: return dict(statusCode=\"200\")", "chat_id == telegram_settings.chat_id: client = boto3.client('s3') bucket = aws_settings.raw_bucket root_path" ]
[ "site_stat: site_stat.record = last_rec site_stat.save() else: SiteStat.objects.create(user=user, record=last_rec) #raise Exception(last_rec.event)", "rec.uri uri = uri.replace('/ru/', '/').replace('/en/', '/') if (uri == '/'):", "'POST'): uri = rec.uri.split('?')[0] else: uri = rec.uri uri =", "= [] page = '{} {}'.format(rec.method, uri) if not page", "== 301): return None if 'favicon.ico' in rec.uri or '/static/'", "a_app = list(filter(lambda x: '/{}/'.format(x) in uri, APPS)) if not", "file entry last = datetime.min site_stat = None if SiteStat.objects.filter(user=user.id).exists():", "= _('total different').capitalize() + ' IP' TOTAL_LOG = _('total log", "file records. The site applications that users have visited and", "len(records) # Save last processed log record last_rec = None", "records = AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] += len(records) # Save last processed", "= records[0] if site_stat: site_stat.record = last_rec site_stat.save() else: SiteStat.objects.create(user=user,", "the last previously processed log file entry last = datetime.min", "x: '/{}/'.format(x) in uri, APPS)) if not a_app: continue app", "about their IP addresses will be shown. \"\"\" TOTAL_IP =", "will be shown. \"\"\" TOTAL_IP = _('total different').capitalize() + '", "records. 
The site applications that users have visited and information", "Determining the access to the site application a_app = list(filter(lambda", "if not a_app: continue app = a_app[0] if not app", "if 'favicon.ico' in rec.uri or '/static/' in rec.uri or '/jsi18n/'", "valid_uri(rec): if (rec.status >= 400) or (rec.status == 301): return", "record=last_rec) #raise Exception(last_rec.event) apps = {} for rec in records:", "record last_rec = None if (len(records) > 0): last_rec =", "return cnt.most_common(), apps def valid_uri(rec): if (rec.status >= 400) or", "'/jsi18n/' in rec.uri or '/photo/get_mini/' in rec.uri: return None if", "apps[app][host]: apps[app][host].append(page) return cnt.most_common(), apps def valid_uri(rec): if (rec.status >=", "user.id).get() if site_stat.record and site_stat.record.event: last = site_stat.record.event # New", "the access to the site application a_app = list(filter(lambda x:", "rec.uri or '/jsi18n/' in rec.uri or '/photo/get_mini/' in rec.uri: return", "IPInfo, AccessLog, SiteStat from v2_hier.utils import APPS def get_site_stat(user): \"\"\"Processing", "host = str(rec.host.info()) #raise Exception('aaa = ', aaa) if not", "'/{}/'.format(x) in uri, APPS)) if not a_app: continue app =", "'/').replace('/en/', '/') if (uri == '/'): return None return uri", "if not page in apps[app][host]: apps[app][host].append(page) return cnt.most_common(), apps def", "'/photo/get_mini/' in rec.uri: return None if ('/?' in rec.uri) and", "The site applications that users have visited and information about", "a_app[0] if not app in apps: apps[app] = {} host", "uri: continue # Determining the access to the site application", "processed log file entry last = datetime.min site_stat = None", "= rec.uri.split('?')[0] else: uri = rec.uri uri = uri.replace('/ru/', '/').replace('/en/',", "and information about their IP addresses will be shown. 
\"\"\"", "!= 'POST'): uri = rec.uri.split('?')[0] else: uri = rec.uri uri", "rec.uri or '/photo/get_mini/' in rec.uri: return None if ('/?' in", "cnt[NEW_LOG] += len(records) # Save last processed log record last_rec", "v2_hier.utils import APPS def get_site_stat(user): \"\"\"Processing a new portion of", "SiteStat.objects.filter(user=user.id).exists(): site_stat = SiteStat.objects.filter(user = user.id).get() if site_stat.record and site_stat.record.event:", "\"\"\" TOTAL_IP = _('total different').capitalize() + ' IP' TOTAL_LOG =", "log file entry last = datetime.min site_stat = None if", "processed log record last_rec = None if (len(records) > 0):", "return None if 'favicon.ico' in rec.uri or '/static/' in rec.uri", "different').capitalize() + ' IP' TOTAL_LOG = _('total log records').capitalize() NEW_LOG", "have visited and information about their IP addresses will be", "apps = {} for rec in records: uri = valid_uri(rec)", "of site visits.\"\"\" import collections from datetime import datetime from", "uri = valid_uri(rec) if not uri: continue # Determining the", "collections from datetime import datetime from functools import reduce from", "that users have visited and information about their IP addresses", "records').capitalize() NEW_LOG = _('new log records').capitalize() cnt = collections.Counter() cnt[TOTAL_IP]", "apps: apps[app] = {} host = str(rec.host.info()) #raise Exception('aaa =", "apps[app] = {} host = str(rec.host.info()) #raise Exception('aaa = ',", "site applications that users have visited and information about their", "in rec.uri or '/jsi18n/' in rec.uri or '/photo/get_mini/' in rec.uri:", "functools import reduce from django.utils.translation import gettext_lazy as _ from", "rec.uri) and (rec.method != 'POST'): uri = rec.uri.split('?')[0] else: uri", "0): last_rec = records[0] if site_stat: site_stat.record = last_rec site_stat.save()", "site_stat.record and site_stat.record.event: last = site_stat.record.event # New records records", "', aaa) if 
not host in apps[app]: apps[app][host] = []", "not page in apps[app][host]: apps[app][host].append(page) return cnt.most_common(), apps def valid_uri(rec):", "a_app: continue app = a_app[0] if not app in apps:", "log records').capitalize() NEW_LOG = _('new log records').capitalize() cnt = collections.Counter()", "app = a_app[0] if not app in apps: apps[app] =", "if ('/?' in rec.uri) and (rec.method != 'POST'): uri =", "as _ from hier.models import IPInfo, AccessLog, SiteStat from v2_hier.utils", "statistics of site visits.\"\"\" import collections from datetime import datetime", "not uri: continue # Determining the access to the site", "from django.utils.translation import gettext_lazy as _ from hier.models import IPInfo,", "their IP addresses will be shown. \"\"\" TOTAL_IP = _('total", "site_stat = SiteStat.objects.filter(user = user.id).get() if site_stat.record and site_stat.record.event: last", "None if (len(records) > 0): last_rec = records[0] if site_stat:", "site_stat.save() else: SiteStat.objects.create(user=user, record=last_rec) #raise Exception(last_rec.event) apps = {} for", "continue app = a_app[0] if not app in apps: apps[app]", "(rec.method != 'POST'): uri = rec.uri.split('?')[0] else: uri = rec.uri", "app in apps: apps[app] = {} host = str(rec.host.info()) #raise", "SiteStat from v2_hier.utils import APPS def get_site_stat(user): \"\"\"Processing a new", "= AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] += len(records) # Save last processed log", "site visits.\"\"\" import collections from datetime import datetime from functools", "a new portion of log file records. 
The site applications", "apps[app][host].append(page) return cnt.most_common(), apps def valid_uri(rec): if (rec.status >= 400)", "_ from hier.models import IPInfo, AccessLog, SiteStat from v2_hier.utils import", "last_rec site_stat.save() else: SiteStat.objects.create(user=user, record=last_rec) #raise Exception(last_rec.event) apps = {}", "if SiteStat.objects.filter(user=user.id).exists(): site_stat = SiteStat.objects.filter(user = user.id).get() if site_stat.record and", "if (rec.status >= 400) or (rec.status == 301): return None", "uri = uri.replace('/ru/', '/').replace('/en/', '/') if (uri == '/'): return", "import collections from datetime import datetime from functools import reduce", "TOTAL_IP = _('total different').capitalize() + ' IP' TOTAL_LOG = _('total", "information about their IP addresses will be shown. \"\"\" TOTAL_IP", "of log file records. The site applications that users have", "last = datetime.min site_stat = None if SiteStat.objects.filter(user=user.id).exists(): site_stat =", "not a_app: continue app = a_app[0] if not app in", "'/static/' in rec.uri or '/jsi18n/' in rec.uri or '/photo/get_mini/' in", "str(rec.host.info()) #raise Exception('aaa = ', aaa) if not host in", "continue # Determining the access to the site application a_app", "visited and information about their IP addresses will be shown.", "and site_stat.record.event: last = site_stat.record.event # New records records =", "log records').capitalize() cnt = collections.Counter() cnt[TOTAL_IP] = len(IPInfo.objects.all()) cnt[TOTAL_LOG] =", "records records = AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] += len(records) # Save last", "visits.\"\"\" import collections from datetime import datetime from functools import", "valid_uri(rec) if not uri: continue # Determining the access to", "in rec.uri: return None if ('/?' 
in rec.uri) and (rec.method", "log record last_rec = None if (len(records) > 0): last_rec", "_('total log records').capitalize() NEW_LOG = _('new log records').capitalize() cnt =", "last_rec = records[0] if site_stat: site_stat.record = last_rec site_stat.save() else:", "= '{} {}'.format(rec.method, uri) if not page in apps[app][host]: apps[app][host].append(page)", "'favicon.ico' in rec.uri or '/static/' in rec.uri or '/jsi18n/' in", "(len(records) > 0): last_rec = records[0] if site_stat: site_stat.record =", "page = '{} {}'.format(rec.method, uri) if not page in apps[app][host]:", "rec.uri or '/static/' in rec.uri or '/jsi18n/' in rec.uri or", "aaa) if not host in apps[app]: apps[app][host] = [] page", "= site_stat.record.event # New records records = AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] +=", "return None if ('/?' in rec.uri) and (rec.method != 'POST'):", "= a_app[0] if not app in apps: apps[app] = {}", "users have visited and information about their IP addresses will", "Save last processed log record last_rec = None if (len(records)", "not app in apps: apps[app] = {} host = str(rec.host.info())", "+ ' IP' TOTAL_LOG = _('total log records').capitalize() NEW_LOG =", "to the site application a_app = list(filter(lambda x: '/{}/'.format(x) in", "import IPInfo, AccessLog, SiteStat from v2_hier.utils import APPS def get_site_stat(user):", "log file records. 
The site applications that users have visited", "from functools import reduce from django.utils.translation import gettext_lazy as _", "> 0): last_rec = records[0] if site_stat: site_stat.record = last_rec", "records: uri = valid_uri(rec) if not uri: continue # Determining", "if not host in apps[app]: apps[app][host] = [] page =", "= None if (len(records) > 0): last_rec = records[0] if", "in records: uri = valid_uri(rec) if not uri: continue #", "gettext_lazy as _ from hier.models import IPInfo, AccessLog, SiteStat from", "= str(rec.host.info()) #raise Exception('aaa = ', aaa) if not host", "django.utils.translation import gettext_lazy as _ from hier.models import IPInfo, AccessLog,", "get_site_stat(user): \"\"\"Processing a new portion of log file records. The", "or '/jsi18n/' in rec.uri or '/photo/get_mini/' in rec.uri: return None", "APPS def get_site_stat(user): \"\"\"Processing a new portion of log file", "len(AccessLog.objects.all()) #Determining the last previously processed log file entry last", "'{} {}'.format(rec.method, uri) if not page in apps[app][host]: apps[app][host].append(page) return", "datetime import datetime from functools import reduce from django.utils.translation import", "= last_rec site_stat.save() else: SiteStat.objects.create(user=user, record=last_rec) #raise Exception(last_rec.event) apps =", "for rec in records: uri = valid_uri(rec) if not uri:", "application a_app = list(filter(lambda x: '/{}/'.format(x) in uri, APPS)) if", "uri, APPS)) if not a_app: continue app = a_app[0] if", "cnt[TOTAL_IP] = len(IPInfo.objects.all()) cnt[TOTAL_LOG] = len(AccessLog.objects.all()) #Determining the last previously", "#Determining the last previously processed log file entry last =", "if not uri: continue # Determining the access to the", "import reduce from django.utils.translation import gettext_lazy as _ from hier.models", "access to the site application a_app = list(filter(lambda x: '/{}/'.format(x)", "APPS)) if not a_app: continue app = 
a_app[0] if not", "(rec.status == 301): return None if 'favicon.ico' in rec.uri or", "uri = rec.uri uri = uri.replace('/ru/', '/').replace('/en/', '/') if (uri", "= len(IPInfo.objects.all()) cnt[TOTAL_LOG] = len(AccessLog.objects.all()) #Determining the last previously processed", "shown. \"\"\" TOTAL_IP = _('total different').capitalize() + ' IP' TOTAL_LOG", "in rec.uri or '/static/' in rec.uri or '/jsi18n/' in rec.uri", "collections.Counter() cnt[TOTAL_IP] = len(IPInfo.objects.all()) cnt[TOTAL_LOG] = len(AccessLog.objects.all()) #Determining the last", ">= 400) or (rec.status == 301): return None if 'favicon.ico'", "SiteStat.objects.filter(user = user.id).get() if site_stat.record and site_stat.record.event: last = site_stat.record.event", "in uri, APPS)) if not a_app: continue app = a_app[0]", "= None if SiteStat.objects.filter(user=user.id).exists(): site_stat = SiteStat.objects.filter(user = user.id).get() if", "uri = rec.uri.split('?')[0] else: uri = rec.uri uri = uri.replace('/ru/',", "\"\"\"Processing a new portion of log file records. The site", "None if 'favicon.ico' in rec.uri or '/static/' in rec.uri or", "= user.id).get() if site_stat.record and site_stat.record.event: last = site_stat.record.event #", "uri) if not page in apps[app][host]: apps[app][host].append(page) return cnt.most_common(), apps", "cnt.most_common(), apps def valid_uri(rec): if (rec.status >= 400) or (rec.status", "[] page = '{} {}'.format(rec.method, uri) if not page in", "def valid_uri(rec): if (rec.status >= 400) or (rec.status == 301):", "None if ('/?' 
in rec.uri) and (rec.method != 'POST'): uri", "records[0] if site_stat: site_stat.record = last_rec site_stat.save() else: SiteStat.objects.create(user=user, record=last_rec)", "previously processed log file entry last = datetime.min site_stat =", "Exception('aaa = ', aaa) if not host in apps[app]: apps[app][host]", "apps[app]: apps[app][host] = [] page = '{} {}'.format(rec.method, uri) if", "import gettext_lazy as _ from hier.models import IPInfo, AccessLog, SiteStat", "len(IPInfo.objects.all()) cnt[TOTAL_LOG] = len(AccessLog.objects.all()) #Determining the last previously processed log", "= SiteStat.objects.filter(user = user.id).get() if site_stat.record and site_stat.record.event: last =", "{} for rec in records: uri = valid_uri(rec) if not", "or '/static/' in rec.uri or '/jsi18n/' in rec.uri or '/photo/get_mini/'", "# Save last processed log record last_rec = None if", "# New records records = AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] += len(records) #", "TOTAL_LOG = _('total log records').capitalize() NEW_LOG = _('new log records').capitalize()", "= _('new log records').capitalize() cnt = collections.Counter() cnt[TOTAL_IP] = len(IPInfo.objects.all())", "or '/photo/get_mini/' in rec.uri: return None if ('/?' in rec.uri)", "else: uri = rec.uri uri = uri.replace('/ru/', '/').replace('/en/', '/') if", "import datetime from functools import reduce from django.utils.translation import gettext_lazy", "\"\"\"Collecting statistics of site visits.\"\"\" import collections from datetime import", "not host in apps[app]: apps[app][host] = [] page = '{}", "page in apps[app][host]: apps[app][host].append(page) return cnt.most_common(), apps def valid_uri(rec): if", "host in apps[app]: apps[app][host] = [] page = '{} {}'.format(rec.method,", "or (rec.status == 301): return None if 'favicon.ico' in rec.uri", "('/?' 
in rec.uri) and (rec.method != 'POST'): uri = rec.uri.split('?')[0]", "SiteStat.objects.create(user=user, record=last_rec) #raise Exception(last_rec.event) apps = {} for rec in", "addresses will be shown. \"\"\" TOTAL_IP = _('total different').capitalize() +", "New records records = AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] += len(records) # Save", "# Determining the access to the site application a_app =", "in rec.uri) and (rec.method != 'POST'): uri = rec.uri.split('?')[0] else:", "None if SiteStat.objects.filter(user=user.id).exists(): site_stat = SiteStat.objects.filter(user = user.id).get() if site_stat.record", "Exception(last_rec.event) apps = {} for rec in records: uri =", "apps def valid_uri(rec): if (rec.status >= 400) or (rec.status ==", "applications that users have visited and information about their IP", "= ', aaa) if not host in apps[app]: apps[app][host] =", "{} host = str(rec.host.info()) #raise Exception('aaa = ', aaa) if", "in rec.uri or '/photo/get_mini/' in rec.uri: return None if ('/?'", "rec.uri: return None if ('/?' in rec.uri) and (rec.method !=", "datetime.min site_stat = None if SiteStat.objects.filter(user=user.id).exists(): site_stat = SiteStat.objects.filter(user =", "if site_stat.record and site_stat.record.event: last = site_stat.record.event # New records", "from v2_hier.utils import APPS def get_site_stat(user): \"\"\"Processing a new portion", "last processed log record last_rec = None if (len(records) >", "= valid_uri(rec) if not uri: continue # Determining the access", "= uri.replace('/ru/', '/').replace('/en/', '/') if (uri == '/'): return None", "datetime from functools import reduce from django.utils.translation import gettext_lazy as", "301): return None if 'favicon.ico' in rec.uri or '/static/' in", "IP addresses will be shown. 
\"\"\" TOTAL_IP = _('total different').capitalize()", "in apps[app]: apps[app][host] = [] page = '{} {}'.format(rec.method, uri)", "if (len(records) > 0): last_rec = records[0] if site_stat: site_stat.record", "= {} for rec in records: uri = valid_uri(rec) if", "rec in records: uri = valid_uri(rec) if not uri: continue", "import APPS def get_site_stat(user): \"\"\"Processing a new portion of log", "_('total different').capitalize() + ' IP' TOTAL_LOG = _('total log records').capitalize()", "def get_site_stat(user): \"\"\"Processing a new portion of log file records.", "+= len(records) # Save last processed log record last_rec =", "from datetime import datetime from functools import reduce from django.utils.translation", "entry last = datetime.min site_stat = None if SiteStat.objects.filter(user=user.id).exists(): site_stat", "if site_stat: site_stat.record = last_rec site_stat.save() else: SiteStat.objects.create(user=user, record=last_rec) #raise", "site application a_app = list(filter(lambda x: '/{}/'.format(x) in uri, APPS))", "cnt[TOTAL_LOG] = len(AccessLog.objects.all()) #Determining the last previously processed log file", "IP' TOTAL_LOG = _('total log records').capitalize() NEW_LOG = _('new log", "else: SiteStat.objects.create(user=user, record=last_rec) #raise Exception(last_rec.event) apps = {} for rec", "records').capitalize() cnt = collections.Counter() cnt[TOTAL_IP] = len(IPInfo.objects.all()) cnt[TOTAL_LOG] = len(AccessLog.objects.all())", "the site application a_app = list(filter(lambda x: '/{}/'.format(x) in uri,", "= {} host = str(rec.host.info()) #raise Exception('aaa = ', aaa)", "portion of log file records. 
The site applications that users", "site_stat.record.event: last = site_stat.record.event # New records records = AccessLog.objects.filter(event__gt=last).order_by('-event')", "{}'.format(rec.method, uri) if not page in apps[app][host]: apps[app][host].append(page) return cnt.most_common(),", "apps[app][host] = [] page = '{} {}'.format(rec.method, uri) if not", "in apps[app][host]: apps[app][host].append(page) return cnt.most_common(), apps def valid_uri(rec): if (rec.status", "last_rec = None if (len(records) > 0): last_rec = records[0]", "list(filter(lambda x: '/{}/'.format(x) in uri, APPS)) if not a_app: continue", "#raise Exception(last_rec.event) apps = {} for rec in records: uri", "AccessLog, SiteStat from v2_hier.utils import APPS def get_site_stat(user): \"\"\"Processing a", "site_stat = None if SiteStat.objects.filter(user=user.id).exists(): site_stat = SiteStat.objects.filter(user = user.id).get()", "from hier.models import IPInfo, AccessLog, SiteStat from v2_hier.utils import APPS", "new portion of log file records. The site applications that", "if not app in apps: apps[app] = {} host =", "= rec.uri uri = uri.replace('/ru/', '/').replace('/en/', '/') if (uri ==", "_('new log records').capitalize() cnt = collections.Counter() cnt[TOTAL_IP] = len(IPInfo.objects.all()) cnt[TOTAL_LOG]", "= datetime.min site_stat = None if SiteStat.objects.filter(user=user.id).exists(): site_stat = SiteStat.objects.filter(user", "400) or (rec.status == 301): return None if 'favicon.ico' in", "be shown. 
\"\"\" TOTAL_IP = _('total different').capitalize() + ' IP'", "NEW_LOG = _('new log records').capitalize() cnt = collections.Counter() cnt[TOTAL_IP] =", "rec.uri.split('?')[0] else: uri = rec.uri uri = uri.replace('/ru/', '/').replace('/en/', '/')", "= len(AccessLog.objects.all()) #Determining the last previously processed log file entry", "(rec.status >= 400) or (rec.status == 301): return None if", "uri.replace('/ru/', '/').replace('/en/', '/') if (uri == '/'): return None return", "hier.models import IPInfo, AccessLog, SiteStat from v2_hier.utils import APPS def", "cnt = collections.Counter() cnt[TOTAL_IP] = len(IPInfo.objects.all()) cnt[TOTAL_LOG] = len(AccessLog.objects.all()) #Determining", "' IP' TOTAL_LOG = _('total log records').capitalize() NEW_LOG = _('new", "and (rec.method != 'POST'): uri = rec.uri.split('?')[0] else: uri =", "reduce from django.utils.translation import gettext_lazy as _ from hier.models import", "site_stat.record = last_rec site_stat.save() else: SiteStat.objects.create(user=user, record=last_rec) #raise Exception(last_rec.event) apps", "in apps: apps[app] = {} host = str(rec.host.info()) #raise Exception('aaa", "= list(filter(lambda x: '/{}/'.format(x) in uri, APPS)) if not a_app:", "last previously processed log file entry last = datetime.min site_stat", "AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] += len(records) # Save last processed log record", "#raise Exception('aaa = ', aaa) if not host in apps[app]:", "= _('total log records').capitalize() NEW_LOG = _('new log records').capitalize() cnt", "= collections.Counter() cnt[TOTAL_IP] = len(IPInfo.objects.all()) cnt[TOTAL_LOG] = len(AccessLog.objects.all()) #Determining the", "last = site_stat.record.event # New records records = AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG]", "site_stat.record.event # New records records = AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] += len(records)" ]
[ "no master server.\"\"\" try: resources = boto3.client(\"cloudformation\").describe_stack_resource( StackName=stack_name, LogicalResourceId=\"MasterServer\" )", "{0}. This is probably a bug on our end. \"", "the given EFS file system id. :param efs_fs_id: EFS file", "error(\"Failed to get cluster {0} username.\".format(cluster_name)) except ClientError as e:", "client to use to verify stack status :return: True if", "if the instance type cannot be found or the pricing", "\"m5.4xlarge\"] } } } :param region: AWS Region :param feature:", "\"Please submit an issue {1}\".format(feature, PCLUSTER_ISSUES_LINK) ) return supported_features def", "one.\"\"\" try: latest = json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[ \"info\" ][\"version\"] if get_installed_version() <", "supported by a feature, for example. { \"Features\": { \"efa\":", "arguments to method :return: generator with boto3 results \"\"\" client", "for a stack in the status: {0}\".format(stack_status)) elif stack_status in", "{ \"efa\": { \"instances\": [\"c5n.18xlarge\", \"p3dn.24xlarge\", \"i3en.24xlarge\"], \"baseos\": [\"alinux\", \"centos7\"],", "os.path.relpath(os.path.join(root, file), start=path)) file_out.seek(0) return file_out def upload_resources_artifacts(bucket_name, root): \"\"\"", "to the stderr if fail_on_error is true.\"\"\" if fail_on_error: sys.exit(\"ERROR:", "try: features = _get_json_from_s3(region, \"features/feature_whitelist.json\") supported_features = features.get(\"Features\").get(feature) except (ValueError,", "which we want to know the supported os :return: a", "IP Address of the MasterServer. 
:param stack_name: The name of", "\"slurm\", \"torque\"] }, \"batch\": { \"instances\": [\"r3.8xlarge\", ..., \"m5.4xlarge\"] }", "_get_json_from_s3(region, \"instances/instances.json\") vcpus = int(instances[instance_type][\"vcpus\"]) except (KeyError, ValueError, ClientError): vcpus", "bucket.\"\"\" region = get_region() s3_suffix = \".cn\" if region.startswith(\"cn\") else", "pass def warn(message): \"\"\"Print a warning message.\"\"\" print(\"WARNING: {0}\".format(message)) def", "stack_result.get(\"StackStatus\") valid_status = [\"CREATE_COMPLETE\", \"UPDATE_COMPLETE\", \"UPDATE_ROLLBACK_COMPLETE\"] invalid_status = [\"DELETE_COMPLETE\", \"DELETE_IN_PROGRESS\"]", "for. :return: the number of vcpus or -1 if the", "res)): bucket.upload_fileobj(zip_dir(os.path.join(root, res)), \"%s/artifacts.zip\" % res) elif os.path.isfile(os.path.join(root, res)): bucket.upload_file(os.path.join(root,", "state != \"running\" or ip_address is None: error(\"MasterServer: %s\\nCannot get", "\"\" while status == \"CREATE_IN_PROGRESS\": status = get_stack(stack_name, cfn_client).get(\"StackStatus\") events", "(\"Status: %s - %s\" % (events.get(\"LogicalResourceId\"), events.get(\"ResourceStatus\"))).ljust( 80 ) sys.stdout.write(\"\\r%s\"", "error( \"Failed validate {0}. This is probably a bug on", "= logging.getLogger(__name__) PCLUSTER_STACK_PREFIX = \"parallelcluster-\" PCLUSTER_ISSUES_LINK = \"https://github.com/aws/aws-parallelcluster/issues\" def get_stack_name(cluster_name):", "\"\"\"Get AWS_DEFAULT_REGION from the environment.\"\"\" return os.environ.get(\"AWS_DEFAULT_REGION\") def get_partition(): \"\"\"Get", "= cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] stack_status = stack_result.get(\"StackStatus\") valid_status = [\"CREATE_COMPLETE\", \"UPDATE_COMPLETE\", \"UPDATE_ROLLBACK_COMPLETE\"]", "to delete bucket %s. Please delete it manually.\" % bucket_name)", "uploaded to $bucket_name. 
:param bucket_name: name of the S3 bucket", "License for the specific language governing permissions and # limitations", "EC2, independent by the region.\"\"\" return boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum def get_master_server_id(stack_name): \"\"\"Return", "independent by the region.\"\"\" return boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum def get_master_server_id(stack_name): \"\"\"Return the", "Reserved. # # Licensed under the Apache License, Version 2.0", "archive is created in memory and a file handler is", "to archive. :return file handler pointing to the compressed archive.", "vcpus def get_supported_os(scheduler): \"\"\" Return a tuple of the os", "client :return: the Stack data type \"\"\" try: if not", "sys.exit(\"ERROR: {0}\".format(message)) else: print(\"ERROR: {0}\".format(message)) def get_cfn_param(params, key_name): \"\"\" Get", "the environment.\"\"\" return \"aws-us-gov\" if get_region().startswith(\"us-gov\") else \"aws\" def paginate_boto3(method,", "License is located at # # http://aws.amazon.com/apache2.0/ # # or", "output_key), None) def get_stack(stack_name, cfn_client=None): \"\"\" Get the output for", "Stack Output. 
:param stack_outputs: Cloudformation Stack Outputs :param output_key: Output", "of strings of the supported scheduler \"\"\" return \"sge\", \"torque\",", "file), os.path.relpath(os.path.join(root, file), start=path)) file_out.seek(0) return file_out def upload_resources_artifacts(bucket_name, root):", "get cluster {0} username.\".format(cluster_name)) except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) return", "**kwargs): \"\"\" Return a generator for a boto3 call, this", "feature \"\"\" try: features = _get_json_from_s3(region, \"features/feature_whitelist.json\") supported_features = features.get(\"Features\").get(feature)", "if no master server.\"\"\" try: resources = boto3.client(\"cloudformation\").describe_stack_resource( StackName=stack_name, LogicalResourceId=\"MasterServer\"", "os :return: a tuple of strings of the supported os", "issue {1}\".format(feature, PCLUSTER_ISSUES_LINK) ) return supported_features def get_instance_vcpus(region, instance_type): \"\"\"", "except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) def _get_master_server_ip(stack_name): \"\"\" Get the", "json object representing the file content :raises ClientError if unable", "if get_region().startswith(\"us-gov\") else \"aws\" def paginate_boto3(method, **kwargs): \"\"\" Return a", "boto3.client(\"s3\").exceptions.NoSuchBucket: pass except ClientError: print(\"Failed to delete bucket %s. Please", "responses. 
:param method: boto3 method :param kwargs: arguments to method", "% (events.get(\"LogicalResourceId\"), events.get(\"ResourceStatus\"))).ljust( 80 ) sys.stdout.write(\"\\r%s\" % resource_status) sys.stdout.flush() time.sleep(5)", "Search for a Mount Target Id in given availability zone", "found or the pricing file cannot be retrieved/parsed \"\"\" try:", "to get cluster {0} username.\".format(cluster_name)) except ClientError as e: error(e.response.get(\"Error\").get(\"Message\"))", "of the cloudformation stack :param config: Config object :return private/public", "to method :return: generator with boto3 results \"\"\" client =", "the instance type cannot be found or the pricing file", "in the logs if resource_status != \"\": LOGGER.debug(resource_status) if status", "state = instance.get(\"State\").get(\"Name\") if state != \"running\" or ip_address is", ":return: the mount_target_id or None \"\"\" mount_target_id = None if", "avail_zone def get_latest_alinux_ami_id(): \"\"\"Get latest alinux ami id.\"\"\" try: alinux_ami_id", "zipfile from io import BytesIO import boto3 import pkg_resources from", "submit an issue {1}\".format(feature, PCLUSTER_ISSUES_LINK) ) return supported_features def get_instance_vcpus(region,", "= _get_json_from_s3(region, \"instances/instances.json\") vcpus = int(instances[instance_type][\"vcpus\"]) except (KeyError, ValueError, ClientError):", "See the License for the specific language governing permissions and", "boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id) for mount_target in mount_targets.get(\"MountTargets\"): # Check to see if", "feature): \"\"\" Get a json object containing the attributes supported", "stack creation fails. 
:param stack_name: the stack name that we", "to use to verify stack status :return: True if the", "ip_address is None: ip_address = instance.get(\"PrivateIpAddress\") state = instance.get(\"State\").get(\"Name\") if", "ClientError as e: error(\"Unable to retrieve Amazon Linux AMI id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\")))", "cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0] resource_status = (\"Status: %s - %s\" % (events.get(\"LogicalResourceId\"), events.get(\"ResourceStatus\"))).ljust(", "rooted in path. The archive is created in memory and", "as e: error(e.response.get(\"Error\").get(\"Message\")) return master_ip, username def get_cli_log_file(): return os.path.expanduser(os.path.join(\"~\",", "upload_resources_artifacts(bucket_name, root): \"\"\" Upload to the specified S3 bucket the", "ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) return master_ip, username def get_cli_log_file(): return", "type to search for. :return: the number of vcpus or", "\"instances\": [\"r3.8xlarge\", ..., \"m5.4xlarge\"] } } } :param region: AWS", "status = get_stack(stack_name, cfn_client).get(\"StackStatus\") resource_status = \"\" while status ==", "= get_stack_name(cluster_name) stack_result = cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] stack_status = stack_result.get(\"StackStatus\") valid_status =", "private/public ip address \"\"\" ec2 = boto3.client(\"ec2\") master_id = get_master_server_id(stack_name)", "the License. # fmt: off from __future__ import absolute_import, print_function", "os.walk(path): for file in files: ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path))", "All Rights Reserved. 
# # Licensed under the Apache License,", "get_templates_bucket_path(): \"\"\"Return a string containing the path of bucket.\"\"\" region", "to know the supported os :return: a tuple of strings", "for res in os.listdir(root): if os.path.isdir(os.path.join(root, res)): bucket.upload_fileobj(zip_dir(os.path.join(root, res)), \"%s/artifacts.zip\"", "ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0] ip_address = instance.get(\"PublicIpAddress\") if ip_address is None: ip_address =", "import time import urllib.request import zipfile from io import BytesIO", ":param params: Cloudformation Stack Parameters :param key_name: Parameter Key :return:", "containing the path of bucket.\"\"\" region = get_region() s3_suffix =", "if region.startswith(\"cn\") else \"\" return \"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format( REGION=region, S3_SUFFIX=s3_suffix ) def", "if that parameter exists, otherwise None \"\"\" param_value = next((i.get(\"ParameterValue\")", "isinstance(e, ClientError): code = e.response.get(\"Error\").get(\"Code\") if code == \"InvalidAccessKeyId\": error(e.response.get(\"Error\").get(\"Message\"))", "!= \"running\" or ip_address is None: error(\"MasterServer: %s\\nCannot get ip", "in os.walk(path): for file in files: ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file),", "= boto3.client(\"cloudformation\") try: stack_name = get_stack_name(cluster_name) stack_result = cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] stack_status", "the directory rooted in root path. All dirs contained in", "stack_outputs if o.get(\"OutputKey\") == output_key), None) def get_stack(stack_name, cfn_client=None): \"\"\"", "as ziph: for root, _, files in os.walk(path): for file", "Stack Parameters :param key_name: Parameter Key :return: ParameterValue if that", "the content of the directory rooted in root path. 
All", "return PCLUSTER_STACK_PREFIX + cluster_name def get_region(): \"\"\"Get AWS_DEFAULT_REGION from the", "containing all the attributes supported by feature \"\"\" try: features", "# OR CONDITIONS OF ANY KIND, express or implied. See", "if scheduler == \"awsbatch\" else \"alinux\", \"centos6\", \"centos7\", \"ubuntu1604\", \"ubuntu1804\"", "stack_result = cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] stack_status = stack_result.get(\"StackStatus\") valid_status = [\"CREATE_COMPLETE\", \"UPDATE_COMPLETE\",", "id.\"\"\" try: alinux_ami_id = ( boto3.client(\"ssm\") .get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\") .get(\"Parameters\")[0] .get(\"Value\") )", "get_stack(stack_name, cfn_client=None): \"\"\" Get the output for a DescribeStacks action", "= get_stack_output_value(outputs, \"MasterPublicIP\") or _get_master_server_ip(stack_name) username = get_stack_output_value(outputs, \"ClusterUser\") else:", "ip_address def get_master_ip_and_username(cluster_name): cfn = boto3.client(\"cloudformation\") try: stack_name = get_stack_name(cluster_name)", "# Licensed under the Apache License, Version 2.0 (the \"License\").", "Region :param feature: the feature to search for, i.e. 
\"efa\"", "in events: if event.get(\"ResourceStatus\") == \"CREATE_FAILED\": LOGGER.info( \" - %s", "\"\"\"Print a warning message.\"\"\" print(\"WARNING: {0}\".format(message)) def error(message, fail_on_error=True): \"\"\"Print", "paginator = client.get_paginator(method.__name__) for page in paginator.paginate(**kwargs).result_key_iters(): for result in", "supported scheduler \"\"\" return \"sge\", \"torque\", \"slurm\", \"awsbatch\" def get_stack_output_value(stack_outputs,", "{0}\".format(message)) def get_cfn_param(params, key_name): \"\"\" Get parameter value from Cloudformation", "Wait for the stack creation to be completed and notify", "\"features/feature_whitelist.json\") supported_features = features.get(\"Features\").get(feature) except (ValueError, ClientError, KeyError) as e:", ":param method: boto3 method :param kwargs: arguments to method :return:", "(ClientError, IndexError) as e: error(e.response.get(\"Error\").get(\"Message\")) def verify_stack_creation(stack_name, cfn_client): \"\"\" Wait", "for a Mount Target Id in given availability zone for", "id of the master server, or [] if no master", "try: resources = boto3.client(\"cloudformation\").describe_stack_resource( StackName=stack_name, LogicalResourceId=\"MasterServer\" ) return resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\") except", "# the License. A copy of the License is located", ":param kwargs: arguments to method :return: generator with boto3 results", "file_name): \"\"\" Get pricing file (if none) and parse content", "if fail_on_error is true.\"\"\" if fail_on_error: sys.exit(\"ERROR: {0}\".format(message)) else: print(\"ERROR:", ") ) return avail_zone def get_latest_alinux_ami_id(): \"\"\"Get latest alinux ami", "we want to know the supported os :return: a tuple", "return False return True def get_templates_bucket_path(): \"\"\"Return a string containing", "ANY KIND, express or implied. 
See the License for the", "get_supported_features(region, feature): \"\"\" Get a json object containing the attributes", "avail_zone == get_avail_zone(mount_target_subnet): mount_target_id = mount_target.get(\"MountTargetId\") return mount_target_id def get_avail_zone(subnet_id):", "\"DELETE_IN_PROGRESS\"] if stack_status in invalid_status: error(\"Unable to retrieve master_ip and", "in files: ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path)) file_out.seek(0) return file_out", "for i in params if i.get(\"ParameterKey\") == key_name), \"NONE\") return", "= client.get_paginator(method.__name__) for page in paginator.paginate(**kwargs).result_key_iters(): for result in page:", "import logging import os import sys import time import urllib.request", "error(\"Unable to retrieve master_ip and username for a stack in", "value from Cloudformation Stack Parameters. :param params: Cloudformation Stack Parameters", "def get_templates_bucket_path(): \"\"\"Return a string containing the path of bucket.\"\"\"", "= features.get(\"Features\").get(feature) except (ValueError, ClientError, KeyError) as e: if isinstance(e,", ":return private/public ip address \"\"\" ec2 = boto3.client(\"ec2\") master_id =", "print(\"WARNING: {0}\".format(message)) def error(message, fail_on_error=True): \"\"\"Print an error message and", "absolute_import, print_function # isort:skip from future import standard_library # isort:skip", "event.get(\"ResourceType\"), event.get(\"LogicalResourceId\"), event.get(\"ResourceStatusReason\"), ) return False return True def get_templates_bucket_path():", "get_supported_schedulers(): \"\"\" Return a tuple of the scheduler supported by", "instance_type: the instance type to search for. :return: the number", ":return: True if the creation was successful, false otherwise. 
\"\"\"", "name of the S3 bucket where files are uploaded :param", "= boto3.resource(\"s3\").Bucket(bucket_name) bucket.objects.all().delete() bucket.delete() except boto3.client(\"s3\").exceptions.NoSuchBucket: pass except ClientError: print(\"Failed", "not master_ip: error(\"Failed to get cluster {0} ip.\".format(cluster_name)) if not", "[\"alinux\", \"centos7\"], \"schedulers\": [\"sge\", \"slurm\", \"torque\"] }, \"batch\": { \"instances\":", "outputs = stack_result.get(\"Outputs\") master_ip = get_stack_output_value(outputs, \"MasterPublicIP\") or _get_master_server_ip(stack_name) username", "the instance types available on EC2, independent by the region.\"\"\"", "to see if there is an existing mt in the", "_get_master_server_ip(stack_name): \"\"\" Get the IP Address of the MasterServer. :param", "types available on EC2, independent by the region.\"\"\" return boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum", "# limitations under the License. # fmt: off from __future__", "detect availability zone for subnet {0}.\\n{1}\".format( subnet_id, e.response.get(\"Error\").get(\"Message\") ) )", "for subnet {0}.\\n{1}\".format( subnet_id, e.response.get(\"Error\").get(\"Message\") ) ) return avail_zone def", "the output for a DescribeStacks action for the given Stack.", "return \"alinux\" if scheduler == \"awsbatch\" else \"alinux\", \"centos6\", \"centos7\",", "if not cfn_client: cfn_client = boto3.client(\"cloudformation\") return cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] except (ClientError,", "ValueError if unable to decode the file content \"\"\" bucket_name", "= get_cfn_param(stack_result.get(\"Parameters\"), \"BaseOS\") username = mappings.get(base_os).get(\"User\") if not master_ip: error(\"Failed", "][\"version\"] if get_installed_version() < latest: print(\"Info: There is a newer", "of the master server, or [] if no master server.\"\"\"", "validate {0}. 
This is probably a bug on our end.", "Key :return: OutputValue if that output exists, otherwise None \"\"\"", "will be uploaded to $bucket_name. :param bucket_name: name of the", "returned by the function. :param path: directory containing the resources", "\"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format( REGION=region, S3_SUFFIX=s3_suffix ) def get_installed_version(): \"\"\"Get the version of", "boto3.client(\"cloudformation\") return cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] except (ClientError, IndexError) as e: error(e.response.get(\"Error\").get(\"Message\")) def", "status update in the logs if resource_status != \"\": LOGGER.debug(resource_status)", "master_id: error(\"MasterServer not running. Can't SSH\") instance = ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0] ip_address", "creation was successful, false otherwise. \"\"\" status = get_stack(stack_name, cfn_client).get(\"StackStatus\")", "content :raises ClientError if unable to download the file :raises", "# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights", "except s3_client.exceptions.BucketAlreadyOwnedByYou: print(\"Bucket already exists\") def delete_s3_bucket(bucket_name): \"\"\" Delete an", "file is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "zip archive containing all files and dirs rooted in path.", "with boto3 results \"\"\" client = method.__self__ paginator = client.get_paginator(method.__name__)", "action for the given Stack. 
:param stack_name: the CFN Stack", "stack_status in valid_status: outputs = stack_result.get(\"Outputs\") master_ip = get_stack_output_value(outputs, \"MasterPublicIP\")", ":param config: Config object :return private/public ip address \"\"\" ec2", "e.response.get(\"Error\").get(\"Code\") if code == \"InvalidAccessKeyId\": error(e.response.get(\"Error\").get(\"Message\")) error( \"Failed validate {0}.", "isort:skip standard_library.install_aliases() # fmt: on import json import logging import", "except ClientError as e: error(\"Unable to retrieve Amazon Linux AMI", "= _get_master_server_ip(stack_name) template = cfn.get_template(StackName=stack_name) mappings = template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\") base_os =", "the instance type to search for. :return: the number of", ":param region: aws region \"\"\" s3_client = boto3.client(\"s3\") \"\"\" :type", "to retrieve master_ip and username for a stack in the", "are uploaded :param root: root directory containing the resources to", "sys import time import urllib.request import zipfile from io import", "client = method.__self__ paginator = client.get_paginator(method.__name__) for page in paginator.paginate(**kwargs).result_key_iters():", "containing the attributes supported by a feature, for example. {", "example. { \"Features\": { \"efa\": { \"instances\": [\"c5n.18xlarge\", \"p3dn.24xlarge\", \"i3en.24xlarge\"],", "def paginate_boto3(method, **kwargs): \"\"\" Return a generator for a boto3", "delete \"\"\" try: bucket = boto3.resource(\"s3\").Bucket(bucket_name) bucket.objects.all().delete() bucket.delete() except boto3.client(\"s3\").exceptions.NoSuchBucket:", "Stack Parameters. 
:param params: Cloudformation Stack Parameters :param key_name: Parameter", "exception to the stderr if fail_on_error is true.\"\"\" if fail_on_error:", "= \"https://github.com/aws/aws-parallelcluster/issues\" def get_stack_name(cluster_name): return PCLUSTER_STACK_PREFIX + cluster_name def get_region():", "except (ValueError, ClientError, KeyError) as e: if isinstance(e, ClientError): code", "events: if event.get(\"ResourceStatus\") == \"CREATE_FAILED\": LOGGER.info( \" - %s %s", "else \"alinux\", \"centos6\", \"centos7\", \"ubuntu1604\", \"ubuntu1804\" def get_supported_schedulers(): \"\"\" Return", "get_master_server_id(stack_name): \"\"\"Return the physical id of the master server, or", "name of the S3 bucket to delete \"\"\" try: bucket", "if unable to download the file :raises ValueError if unable", "bucket_name: name of the S3 bucket to create :param region:", "2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. #", "given availability zone for the given EFS file system id.", "except (KeyError, ValueError, ClientError): vcpus = -1 return vcpus def", "not username: error(\"Failed to get cluster {0} username.\".format(cluster_name)) except ClientError", "archive containing all files and dirs rooted in path. The", "address \"\"\" ec2 = boto3.client(\"ec2\") master_id = get_master_server_id(stack_name) if not", "if code == \"InvalidAccessKeyId\": error(e.response.get(\"Error\").get(\"Message\")) error( \"Failed validate {0}. This", "id. :param efs_fs_id: EFS file system Id :param avail_zone: Availability", "instance type to search for. 
:return: the number of vcpus", "username = get_stack_output_value(outputs, \"ClusterUser\") else: # Stack is in CREATING,", "latest: print(\"Info: There is a newer version %s of AWS", "from future import standard_library # isort:skip standard_library.install_aliases() # fmt: on", "retrieved/parsed \"\"\" try: instances = _get_json_from_s3(region, \"instances/instances.json\") vcpus = int(instances[instance_type][\"vcpus\"])", "except (ClientError, IndexError) as e: error(e.response.get(\"Error\").get(\"Message\")) def verify_stack_creation(stack_name, cfn_client): \"\"\"", "bucket.objects.all().delete() bucket.delete() except boto3.client(\"s3\").exceptions.NoSuchBucket: pass except ClientError: print(\"Failed to delete", "files: ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path)) file_out.seek(0) return file_out def", "boto3.client(\"cloudformation\").describe_stack_resource( StackName=stack_name, LogicalResourceId=\"MasterServer\" ) return resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\") except ClientError as e:", "not master_id: error(\"MasterServer not running. Can't SSH\") instance = ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0]", "\"UPDATE_ROLLBACK_COMPLETE\"] invalid_status = [\"DELETE_COMPLETE\", \"DELETE_IN_PROGRESS\"] if stack_status in invalid_status: error(\"Unable", "zone to verify :return: the mount_target_id or None \"\"\" mount_target_id", "fails. 
:param stack_name: the stack name that we should verify", "supported os \"\"\" return \"alinux\" if scheduler == \"awsbatch\" else", "Mount Target Id in given availability zone for the given", "# # or in the \"LICENSE.txt\" file accompanying this file.", "to verify stack status :return: True if the creation was", "zone for subnet {0}.\\n{1}\".format( subnet_id, e.response.get(\"Error\").get(\"Message\") ) ) return avail_zone", ":param stack_name: The name of the cloudformation stack :param config:", "with # the License. A copy of the License is", "EFS file system Id :param avail_zone: Availability zone to verify", "= json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[ \"info\" ][\"version\"] if get_installed_version() < latest: print(\"Info: There", "elif os.path.isfile(os.path.join(root, res)): bucket.upload_file(os.path.join(root, res), res) def _get_json_from_s3(region, file_name): \"\"\"", "..., \"m5.4xlarge\"] } } } :param region: AWS Region :param", "= -1 return vcpus def get_supported_os(scheduler): \"\"\" Return a tuple", "# # http://aws.amazon.com/apache2.0/ # # or in the \"LICENSE.txt\" file", "an S3 bucket together with all stored objects. :param bucket_name:", "LOGGER.critical(\"\\nCluster creation failed. Failed events:\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\") for event", "from __future__ import absolute_import, print_function # isort:skip from future import", "Parameter Key :return: ParameterValue if that parameter exists, otherwise None", "except in compliance with # the License. 
A copy of", "features = _get_json_from_s3(region, \"features/feature_whitelist.json\") supported_features = features.get(\"Features\").get(feature) except (ValueError, ClientError,", "ip address.\", state.upper()) return ip_address def get_master_ip_and_username(cluster_name): cfn = boto3.client(\"cloudformation\")", "get :return: a json object representing the file content :raises", "status: {0}\".format(stack_status)) elif stack_status in valid_status: outputs = stack_result.get(\"Outputs\") master_ip", "S3 bucket the content of the directory rooted in root", "\"\"\" mount_target_id = None if efs_fs_id: mount_targets = boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id) for", "Get output value from Cloudformation Stack Output. :param stack_outputs: Cloudformation", "__future__ import absolute_import, print_function # isort:skip from future import standard_library", "All dirs contained in root dir will be uploaded as", "by the function. :param path: directory containing the resources to", "code == \"InvalidAccessKeyId\": error(e.response.get(\"Error\").get(\"Message\")) error( \"Failed validate {0}. This is", "Output Key :return: OutputValue if that output exists, otherwise None", "by feature \"\"\" try: features = _get_json_from_s3(region, \"features/feature_whitelist.json\") supported_features =", "[] if no master server.\"\"\" try: resources = boto3.client(\"cloudformation\").describe_stack_resource( StackName=stack_name,", "for example. 
{ \"Features\": { \"efa\": { \"instances\": [\"c5n.18xlarge\", \"p3dn.24xlarge\",", "ec2 = boto3.client(\"ec2\") master_id = get_master_server_id(stack_name) if not master_id: error(\"MasterServer", "\"\"\" Return a generator for a boto3 call, this allows", "exists, otherwise None \"\"\" return next((o.get(\"OutputValue\") for o in stack_outputs", "\"centos7\"], \"schedulers\": [\"sge\", \"slurm\", \"torque\"] }, \"batch\": { \"instances\": [\"r3.8xlarge\",", "Cloudformation Stack Outputs :param output_key: Output Key :return: OutputValue if", "output_key: Output Key :return: OutputValue if that output exists, otherwise", "to $bucket_name. :param bucket_name: name of the S3 bucket where", "under the License. # fmt: off from __future__ import absolute_import,", "bucket = boto3.resource(\"s3\").Bucket(bucket_name) for res in os.listdir(root): if os.path.isdir(os.path.join(root, res)):", "alinux ami id.\"\"\" try: alinux_ami_id = ( boto3.client(\"ssm\") .get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\") .get(\"Parameters\")[0]", "stack_status = stack_result.get(\"StackStatus\") valid_status = [\"CREATE_COMPLETE\", \"UPDATE_COMPLETE\", \"UPDATE_ROLLBACK_COMPLETE\"] invalid_status =", "License, Version 2.0 (the \"License\"). You may not use this", "containing the resources to archive. :return file handler pointing to", "as e: error(e.response.get(\"Error\").get(\"Message\")) def _get_master_server_ip(stack_name): \"\"\" Get the IP Address", "of all the instance types available on EC2, independent by", "% latest) except Exception: pass def warn(message): \"\"\"Print a warning", "2.0 (the \"License\"). 
You may not use this file except", "return next((o.get(\"OutputValue\") for o in stack_outputs if o.get(\"OutputKey\") == output_key),", "There is a newer version %s of AWS ParallelCluster available.\"", "StackName=stack_name, LogicalResourceId=\"MasterServer\" ) return resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\") except ClientError as e: error(e.response.get(\"Error\").get(\"Message\"))", "def get_supported_schedulers(): \"\"\" Return a tuple of the scheduler supported", "the specific language governing permissions and # limitations under the", ".get(\"Value\") ) except ClientError as e: error(\"Unable to retrieve Amazon", "to detect availability zone for subnet {0}.\\n{1}\".format( subnet_id, e.response.get(\"Error\").get(\"Message\") )", "the status: {0}\".format(stack_status)) elif stack_status in valid_status: outputs = stack_result.get(\"Outputs\")", "file_contents = boto3.resource(\"s3\").Object(bucket_name, file_name).get()[\"Body\"].read().decode(\"utf-8\") return json.loads(file_contents) def get_supported_features(region, feature): \"\"\"", "bucket.upload_file(os.path.join(root, res), res) def _get_json_from_s3(region, file_name): \"\"\" Get pricing file", "if status != \"CREATE_COMPLETE\": LOGGER.critical(\"\\nCluster creation failed. Failed events:\") events", "verify :param cfn_client: the CloudFormation client to use to verify", "express or implied. 
See the License for the specific language", "io import BytesIO import boto3 import pkg_resources from botocore.exceptions import", ":raises ValueError if unable to decode the file content \"\"\"", "== key_name), \"NONE\") return param_value.strip() def get_efs_mount_target_id(efs_fs_id, avail_zone): \"\"\" Search", "get_stack(stack_name, cfn_client).get(\"StackStatus\") resource_status = \"\" while status == \"CREATE_IN_PROGRESS\": status", "cluster {0} username.\".format(cluster_name)) except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) return master_ip,", "instances = _get_json_from_s3(region, \"instances/instances.json\") vcpus = int(instances[instance_type][\"vcpus\"]) except (KeyError, ValueError,", "get_region(): \"\"\"Get AWS_DEFAULT_REGION from the environment.\"\"\" return os.environ.get(\"AWS_DEFAULT_REGION\") def get_partition():", "ip_address = instance.get(\"PrivateIpAddress\") state = instance.get(\"State\").get(\"Name\") if state != \"running\"", "def list_ec2_instance_types(): \"\"\"Return a list of all the instance types", "and parse content as json. :param region: AWS Region :param", "strings of the supported scheduler \"\"\" return \"sge\", \"torque\", \"slurm\",", "os \"\"\" return \"alinux\" if scheduler == \"awsbatch\" else \"alinux\",", "if region != \"us-east-1\": s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={\"LocationConstraint\": region}) else: s3_client.create_bucket(Bucket=bucket_name) except", "features.get(\"Features\").get(feature) except (ValueError, ClientError, KeyError) as e: if isinstance(e, ClientError):", "a json object containing the attributes supported by a feature,", "# or in the \"LICENSE.txt\" file accompanying this file. 
This", "} } } :param region: AWS Region :param feature: the", "or [] if no master server.\"\"\" try: resources = boto3.client(\"cloudformation\").describe_stack_resource(", "bucket_name: name of the S3 bucket where files are uploaded", "get_stack(stack_name, cfn_client).get(\"StackStatus\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0] resource_status = (\"Status: %s -", ":param stack_name: the stack name that we should verify :param", "o in stack_outputs if o.get(\"OutputKey\") == output_key), None) def get_stack(stack_name,", "mount_targets = boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id) for mount_target in mount_targets.get(\"MountTargets\"): # Check to", "for a DescribeStacks action for the given Stack. :param stack_name:", "= get_region() s3_suffix = \".cn\" if region.startswith(\"cn\") else \"\" return", "existing mt in the az of the stack mount_target_subnet =", "import standard_library # isort:skip standard_library.install_aliases() # fmt: on import json", "the specific scheduler. :param scheduler: the scheduler for which we", "method: boto3 method :param kwargs: arguments to method :return: generator", "\"efa\" \"awsbatch\" :return: json object containing all the attributes supported", "= boto3.client(\"ec2\") master_id = get_master_server_id(stack_name) if not master_id: error(\"MasterServer not", "[\"sge\", \"slurm\", \"torque\"] }, \"batch\": { \"instances\": [\"r3.8xlarge\", ..., \"m5.4xlarge\"]", "specific scheduler. :param scheduler: the scheduler for which we want", "zipfile.ZipFile(file_out, \"w\", zipfile.ZIP_DEFLATED) as ziph: for root, _, files in", "\"UPDATE_COMPLETE\", \"UPDATE_ROLLBACK_COMPLETE\"] invalid_status = [\"DELETE_COMPLETE\", \"DELETE_IN_PROGRESS\"] if stack_status in invalid_status:", "as json. :param region: AWS Region :param file_name the object", "the License. 
A copy of the License is located at", "pricing file cannot be retrieved/parsed \"\"\" try: instances = _get_json_from_s3(region,", "None: ip_address = instance.get(\"PrivateIpAddress\") state = instance.get(\"State\").get(\"Name\") if state !=", "the physical id of the master server, or [] if", ":return: a json object representing the file content :raises ClientError", "return \"aws-us-gov\" if get_region().startswith(\"us-gov\") else \"aws\" def paginate_boto3(method, **kwargs): \"\"\"", "{1}\".format(feature, PCLUSTER_ISSUES_LINK) ) return supported_features def get_instance_vcpus(region, instance_type): \"\"\" Get", "compliance with # the License. A copy of the License", "\"\"\" bucket_name = \"{0}-aws-parallelcluster\".format(region) file_contents = boto3.resource(\"s3\").Object(bucket_name, file_name).get()[\"Body\"].read().decode(\"utf-8\") return json.loads(file_contents)", "cfn.get_template(StackName=stack_name) mappings = template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\") base_os = get_cfn_param(stack_result.get(\"Parameters\"), \"BaseOS\") username =", "region.startswith(\"cn\") else \"\" return \"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format( REGION=region, S3_SUFFIX=s3_suffix ) def get_installed_version():", "if avail_zone == get_avail_zone(mount_target_subnet): mount_target_id = mount_target.get(\"MountTargetId\") return mount_target_id def", "logs if resource_status != \"\": LOGGER.debug(resource_status) if status != \"CREATE_COMPLETE\":", "to verify :return: the mount_target_id or None \"\"\" mount_target_id =", "scheduler == \"awsbatch\" else \"alinux\", \"centos6\", \"centos7\", \"ubuntu1604\", \"ubuntu1804\" def", "None: error(\"MasterServer: %s\\nCannot get ip address.\", state.upper()) return ip_address def", "cannot be retrieved/parsed \"\"\" try: instances = _get_json_from_s3(region, \"instances/instances.json\") vcpus", "print(\"Info: There is a newer version %s of AWS 
ParallelCluster", "ValueError, ClientError): vcpus = -1 return vcpus def get_supported_os(scheduler): \"\"\"", ":param feature: the feature to search for, i.e. \"efa\" \"awsbatch\"", "Id :param avail_zone: Availability zone to verify :return: the mount_target_id", "except ClientError: print(\"Failed to delete bucket %s. Please delete it", "< latest: print(\"Info: There is a newer version %s of", "its affiliates. All Rights Reserved. # # Licensed under the", "Output. :param stack_outputs: Cloudformation Stack Outputs :param output_key: Output Key", "def error(message, fail_on_error=True): \"\"\"Print an error message and Raise SystemExit", "method :return: generator with boto3 results \"\"\" client = method.__self__", "(if none) and parse content as json. :param region: AWS", "status == \"CREATE_IN_PROGRESS\": status = get_stack(stack_name, cfn_client).get(\"StackStatus\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0]", "Key :return: ParameterValue if that parameter exists, otherwise None \"\"\"", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES #", ":param output_key: Output Key :return: OutputValue if that output exists,", "fail_on_error=True): \"\"\"Print an error message and Raise SystemExit exception to", "get_master_ip_and_username(cluster_name): cfn = boto3.client(\"cloudformation\") try: stack_name = get_stack_name(cluster_name) stack_result =", "tuple of strings of the supported os \"\"\" return \"alinux\"", "see if there is an existing mt in the az", "\"torque\"] }, \"batch\": { \"instances\": [\"r3.8xlarge\", ..., \"m5.4xlarge\"] } }", "the resources to upload. \"\"\" bucket = boto3.resource(\"s3\").Bucket(bucket_name) for res", "the IP Address of the MasterServer. :param stack_name: The name", "delete bucket %s. 
Please delete it manually.\" % bucket_name) def", "cfn = boto3.client(\"cloudformation\") try: stack_name = get_stack_name(cluster_name) stack_result = cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0]", "for the given instance type. :param region: AWS Region :param", "AWS ParallelCluster available.\" % latest) except Exception: pass def warn(message):", "dirs rooted in path. The archive is created in memory", "in page: yield result def create_s3_bucket(bucket_name, region): \"\"\" Create a", "stack_name: The name of the cloudformation stack :param config: Config", "CFN Stack name :param cfn_client: boto3 cloudformation client :return: the", "in memory and a file handler is returned by the", "creation failed. Failed events:\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\") for event in", "a tuple of strings of the supported scheduler \"\"\" return", "while status == \"CREATE_IN_PROGRESS\": status = get_stack(stack_name, cfn_client).get(\"StackStatus\") events =", "value from Cloudformation Stack Output. :param stack_outputs: Cloudformation Stack Outputs", "\"aws-us-gov\" if get_region().startswith(\"us-gov\") else \"aws\" def paginate_boto3(method, **kwargs): \"\"\" Return", "attributes supported by a feature, for example. { \"Features\": {", "cfn_client).get(\"StackStatus\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0] resource_status = (\"Status: %s - %s\"", "\"%s/artifacts.zip\" % res) elif os.path.isfile(os.path.join(root, res)): bucket.upload_file(os.path.join(root, res), res) def", "error(\"MasterServer not running. 
Can't SSH\") instance = ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0] ip_address =", "file_name the object name to get :return: a json object", "try: if not cfn_client: cfn_client = boto3.client(\"cloudformation\") return cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] except", "Create a new S3 bucket. :param bucket_name: name of the", "to get :return: a json object representing the file content", "uploaded as zip files to $bucket_name/$dir_name/artifacts.zip. All files contained in", ":raises ClientError if unable to download the file :raises ValueError", "for which we want to know the supported os :return:", "def warn(message): \"\"\"Print a warning message.\"\"\" print(\"WARNING: {0}\".format(message)) def error(message,", "a Mount Target Id in given availability zone for the", "cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\") for event in events: if event.get(\"ResourceStatus\") == \"CREATE_FAILED\": LOGGER.info(", "get_instance_vcpus(region, instance_type): \"\"\" Get number of vcpus for the given", "MasterServer. :param stack_name: The name of the cloudformation stack :param", "\"\"\"Check if the current package version is the latest one.\"\"\"", "from Cloudformation Stack Output. 
:param stack_outputs: Cloudformation Stack Outputs :param", "= BytesIO() with zipfile.ZipFile(file_out, \"w\", zipfile.ZIP_DEFLATED) as ziph: for root,", "\"instances/instances.json\") vcpus = int(instances[instance_type][\"vcpus\"]) except (KeyError, ValueError, ClientError): vcpus =", "username = mappings.get(base_os).get(\"User\") if not master_ip: error(\"Failed to get cluster", "\"awsbatch\" else \"alinux\", \"centos6\", \"centos7\", \"ubuntu1604\", \"ubuntu1804\" def get_supported_schedulers(): \"\"\"", "object containing all the attributes supported by feature \"\"\" try:", "is an existing mt in the az of the stack", "generator with boto3 results \"\"\" client = method.__self__ paginator =", "if resource_status != \"\": LOGGER.debug(resource_status) if status != \"CREATE_COMPLETE\": LOGGER.critical(\"\\nCluster", "\"i3en.24xlarge\"], \"baseos\": [\"alinux\", \"centos7\"], \"schedulers\": [\"sge\", \"slurm\", \"torque\"] }, \"batch\":", "\"\"\" try: bucket = boto3.resource(\"s3\").Bucket(bucket_name) bucket.objects.all().delete() bucket.delete() except boto3.client(\"s3\").exceptions.NoSuchBucket: pass", "\"\"\" Wait for the stack creation to be completed and", "fmt: off from __future__ import absolute_import, print_function # isort:skip from", "LOGGER.debug(resource_status) if status != \"CREATE_COMPLETE\": LOGGER.critical(\"\\nCluster creation failed. Failed events:\")", "attributes supported by feature \"\"\" try: features = _get_json_from_s3(region, \"features/feature_whitelist.json\")", "the supported os :return: a tuple of strings of the", "def get_installed_version(): \"\"\"Get the version of the installed aws-parallelcluster package.\"\"\"", "True if the creation was successful, false otherwise. \"\"\" status", "for the specific scheduler. :param scheduler: the scheduler for which", "verify stack status :return: True if the creation was successful,", "if not master_ip: error(\"Failed to get cluster {0} ip.\".format(cluster_name)) if", "given Stack. 
:param stack_name: the CFN Stack name :param cfn_client:", "a DescribeStacks action for the given Stack. :param stack_name: the", "def delete_s3_bucket(bucket_name): \"\"\" Delete an S3 bucket together with all", "as e: error(\"Unable to retrieve Amazon Linux AMI id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\"))) return", "representing the file content :raises ClientError if unable to download", "def get_supported_features(region, feature): \"\"\" Get a json object containing the", "import os import sys import time import urllib.request import zipfile", "-1 if the instance type cannot be found or the", "if ip_address is None: ip_address = instance.get(\"PrivateIpAddress\") state = instance.get(\"State\").get(\"Name\")", "= instance.get(\"State\").get(\"Name\") if state != \"running\" or ip_address is None:", "by parallelcluster for the specific scheduler. :param scheduler: the scheduler", "(KeyError, ValueError, ClientError): vcpus = -1 return vcpus def get_supported_os(scheduler):", "the Stack data type \"\"\" try: if not cfn_client: cfn_client", "feature: the feature to search for, i.e. \"efa\" \"awsbatch\" :return:", "otherwise None \"\"\" return next((o.get(\"OutputValue\") for o in stack_outputs if", "except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) return master_ip, username def get_cli_log_file():", "the S3 bucket to create :param region: aws region \"\"\"", "this allows pagination over an arbitrary number of responses. 
:param", "files contained in root dir will be uploaded to $bucket_name.", "return boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum def get_master_server_id(stack_name): \"\"\"Return the physical id of the", "version is the latest one.\"\"\" try: latest = json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[ \"info\"", "of AWS ParallelCluster available.\" % latest) except Exception: pass def", "get_avail_zone(mount_target_subnet): mount_target_id = mount_target.get(\"MountTargetId\") return mount_target_id def get_avail_zone(subnet_id): avail_zone =", "directory containing the resources to archive. :return file handler pointing", "master server.\"\"\" try: resources = boto3.client(\"cloudformation\").describe_stack_resource( StackName=stack_name, LogicalResourceId=\"MasterServer\" ) return", "error(message, fail_on_error=True): \"\"\"Print an error message and Raise SystemExit exception", "contained in root dir will be uploaded to $bucket_name. :param", "get_master_server_id(stack_name) if not master_id: error(\"MasterServer not running. Can't SSH\") instance", "or _get_master_server_ip(stack_name) username = get_stack_output_value(outputs, \"ClusterUser\") else: # Stack is", "directory rooted in root path. All dirs contained in root", "cfn_client = boto3.client(\"cloudformation\") return cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] except (ClientError, IndexError) as e:", "supported by feature \"\"\" try: features = _get_json_from_s3(region, \"features/feature_whitelist.json\") supported_features", "def get_latest_alinux_ami_id(): \"\"\"Get latest alinux ami id.\"\"\" try: alinux_ami_id =", "LogicalResourceId=\"MasterServer\" ) return resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\") except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) def", "on our end. 
\" \"Please submit an issue {1}\".format(feature, PCLUSTER_ISSUES_LINK)", ":param key_name: Parameter Key :return: ParameterValue if that parameter exists,", "== \"CREATE_FAILED\": LOGGER.info( \" - %s %s %s\", event.get(\"ResourceType\"), event.get(\"LogicalResourceId\"),", "import boto3 import pkg_resources from botocore.exceptions import ClientError LOGGER =", "import BytesIO import boto3 import pkg_resources from botocore.exceptions import ClientError", "e: error(e.response.get(\"Error\").get(\"Message\")) def verify_stack_creation(stack_name, cfn_client): \"\"\" Wait for the stack", "if event.get(\"ResourceStatus\") == \"CREATE_FAILED\": LOGGER.info( \" - %s %s %s\",", "\"\"\"Return a list of all the instance types available on", "get_cfn_param(params, key_name): \"\"\" Get parameter value from Cloudformation Stack Parameters.", "to upload. \"\"\" bucket = boto3.resource(\"s3\").Bucket(bucket_name) for res in os.listdir(root):", "os.path.isfile(os.path.join(root, res)): bucket.upload_file(os.path.join(root, res), res) def _get_json_from_s3(region, file_name): \"\"\" Get", "vcpus = int(instances[instance_type][\"vcpus\"]) except (KeyError, ValueError, ClientError): vcpus = -1", "address.\", state.upper()) return ip_address def get_master_ip_and_username(cluster_name): cfn = boto3.client(\"cloudformation\") try:", "Get the output for a DescribeStacks action for the given", ":return: OutputValue if that output exists, otherwise None \"\"\" return", "[\"CREATE_COMPLETE\", \"UPDATE_COMPLETE\", \"UPDATE_ROLLBACK_COMPLETE\"] invalid_status = [\"DELETE_COMPLETE\", \"DELETE_IN_PROGRESS\"] if stack_status in", "for the specific language governing permissions and # limitations under", "_get_json_from_s3(region, \"features/feature_whitelist.json\") supported_features = features.get(\"Features\").get(feature) except (ValueError, ClientError, KeyError) as", "file in files: ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path)) 
file_out.seek(0) return", "\"us-east-1\": s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={\"LocationConstraint\": region}) else: s3_client.create_bucket(Bucket=bucket_name) except s3_client.exceptions.BucketAlreadyOwnedByYou: print(\"Bucket already", "cfn_client: the CloudFormation client to use to verify stack status", "upload. \"\"\" bucket = boto3.resource(\"s3\").Bucket(bucket_name) for res in os.listdir(root): if", "res in os.listdir(root): if os.path.isdir(os.path.join(root, res)): bucket.upload_fileobj(zip_dir(os.path.join(root, res)), \"%s/artifacts.zip\" %", "get_region().startswith(\"us-gov\") else \"aws\" def paginate_boto3(method, **kwargs): \"\"\" Return a generator", "ziph: for root, _, files in os.walk(path): for file in", "json. :param region: AWS Region :param file_name the object name", "language governing permissions and # limitations under the License. #", "os supported by parallelcluster for the specific scheduler. :param scheduler:", "WITHOUT WARRANTIES # OR CONDITIONS OF ANY KIND, express or", "A copy of the License is located at # #", ":param region: AWS Region :param instance_type: the instance type to", "\"\"\" try: if region != \"us-east-1\": s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={\"LocationConstraint\": region}) else:", "try: latest = json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[ \"info\" ][\"version\"] if get_installed_version() < latest:", "was successful, false otherwise. \"\"\" status = get_stack(stack_name, cfn_client).get(\"StackStatus\") resource_status", "type cannot be found or the pricing file cannot be", "error(\"Failed to get cluster {0} ip.\".format(cluster_name)) if not username: error(\"Failed", "the attributes supported by a feature, for example. 
{ \"Features\":", "file_out.seek(0) return file_out def upload_resources_artifacts(bucket_name, root): \"\"\" Upload to the", ":param region: AWS Region :param feature: the feature to search", "the installed aws-parallelcluster package.\"\"\" return pkg_resources.get_distribution(\"aws-parallelcluster\").version def check_if_latest_version(): \"\"\"Check if", "be uploaded as zip files to $bucket_name/$dir_name/artifacts.zip. All files contained", "events:\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\") for event in events: if event.get(\"ResourceStatus\")", "region): \"\"\" Create a new S3 bucket. :param bucket_name: name", "be uploaded to $bucket_name. :param bucket_name: name of the S3", "master_ip: error(\"Failed to get cluster {0} ip.\".format(cluster_name)) if not username:", "= boto3.client(\"s3\") \"\"\" :type : pyboto3.s3 \"\"\" try: if region", "get_stack_output_value(stack_outputs, output_key): \"\"\" Get output value from Cloudformation Stack Output.", "a stack in the status: {0}\".format(stack_status)) elif stack_status in valid_status:", "in path. The archive is created in memory and a", "permissions and # limitations under the License. # fmt: off", "SystemExit exception to the stderr if fail_on_error is true.\"\"\" if", "resource_status) sys.stdout.flush() time.sleep(5) # print the last status update in", "not cfn_client: cfn_client = boto3.client(\"cloudformation\") return cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] except (ClientError, IndexError)", "Cloudformation Stack Parameters :param key_name: Parameter Key :return: ParameterValue if", "a bug on our end. 
\" \"Please submit an issue", "boto3.client(\"s3\") \"\"\" :type : pyboto3.s3 \"\"\" try: if region !=", "AWS_DEFAULT_REGION from the environment.\"\"\" return os.environ.get(\"AWS_DEFAULT_REGION\") def get_partition(): \"\"\"Get partition", "boto3 results \"\"\" client = method.__self__ paginator = client.get_paginator(method.__name__) for", "\"Failed validate {0}. This is probably a bug on our", "file (if none) and parse content as json. :param region:", "mount_target.get(\"SubnetId\") if avail_zone == get_avail_zone(mount_target_subnet): mount_target_id = mount_target.get(\"MountTargetId\") return mount_target_id", "the logs if resource_status != \"\": LOGGER.debug(resource_status) if status !=", "return resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\") except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) def _get_master_server_ip(stack_name): \"\"\"", "s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={\"LocationConstraint\": region}) else: s3_client.create_bucket(Bucket=bucket_name) except s3_client.exceptions.BucketAlreadyOwnedByYou: print(\"Bucket already exists\")", "bucket_name: name of the S3 bucket to delete \"\"\" try:", "def get_cfn_param(params, key_name): \"\"\" Get parameter value from Cloudformation Stack", "invalid_status = [\"DELETE_COMPLETE\", \"DELETE_IN_PROGRESS\"] if stack_status in invalid_status: error(\"Unable to", "zip files to $bucket_name/$dir_name/artifacts.zip. All files contained in root dir", "\"awsbatch\" def get_stack_output_value(stack_outputs, output_key): \"\"\" Get output value from Cloudformation", "the last status update in the logs if resource_status !=", "boto3 cloudformation client :return: the Stack data type \"\"\" try:", "# fmt: off from __future__ import absolute_import, print_function # isort:skip", "\"\": LOGGER.debug(resource_status) if status != \"CREATE_COMPLETE\": LOGGER.critical(\"\\nCluster creation failed. 
Failed", "def get_master_server_id(stack_name): \"\"\"Return the physical id of the master server,", "= None if efs_fs_id: mount_targets = boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id) for mount_target in", "version %s of AWS ParallelCluster available.\" % latest) except Exception:", "file content :raises ClientError if unable to download the file", "WARRANTIES # OR CONDITIONS OF ANY KIND, express or implied.", "be retrieved/parsed \"\"\" try: instances = _get_json_from_s3(region, \"instances/instances.json\") vcpus =", "= stack_result.get(\"StackStatus\") valid_status = [\"CREATE_COMPLETE\", \"UPDATE_COMPLETE\", \"UPDATE_ROLLBACK_COMPLETE\"] invalid_status = [\"DELETE_COMPLETE\",", ".get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\") .get(\"Parameters\")[0] .get(\"Value\") ) except ClientError as e: error(\"Unable to", "Cloudformation Stack Output. :param stack_outputs: Cloudformation Stack Outputs :param output_key:", "vcpus for the given instance type. 
:param region: AWS Region", "latest alinux ami id.\"\"\" try: alinux_ami_id = ( boto3.client(\"ssm\") .get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\")", "boto3.resource(\"s3\").Object(bucket_name, file_name).get()[\"Body\"].read().decode(\"utf-8\") return json.loads(file_contents) def get_supported_features(region, feature): \"\"\" Get a", "try: stack_name = get_stack_name(cluster_name) stack_result = cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] stack_status = stack_result.get(\"StackStatus\")", "os.listdir(root): if os.path.isdir(os.path.join(root, res)): bucket.upload_fileobj(zip_dir(os.path.join(root, res)), \"%s/artifacts.zip\" % res) elif", ":param bucket_name: name of the S3 bucket where files are", ":param avail_zone: Availability zone to verify :return: the mount_target_id or", "{0} ip.\".format(cluster_name)) if not username: error(\"Failed to get cluster {0}", "s3_client = boto3.client(\"s3\") \"\"\" :type : pyboto3.s3 \"\"\" try: if", "file cannot be retrieved/parsed \"\"\" try: instances = _get_json_from_s3(region, \"instances/instances.json\")", "\"\"\" Return a tuple of the os supported by parallelcluster", ":return: the number of vcpus or -1 if the instance", "probably a bug on our end. \" \"Please submit an", "boto3 import pkg_resources from botocore.exceptions import ClientError LOGGER = logging.getLogger(__name__)", "availability zone for subnet {0}.\\n{1}\".format( subnet_id, e.response.get(\"Error\").get(\"Message\") ) ) return", "AWS Region :param file_name the object name to get :return:", "isort:skip from future import standard_library # isort:skip standard_library.install_aliases() # fmt:", "search for. :return: the number of vcpus or -1 if", "content of the directory rooted in root path. 
All dirs", ":param cfn_client: boto3 cloudformation client :return: the Stack data type", "base_os = get_cfn_param(stack_result.get(\"Parameters\"), \"BaseOS\") username = mappings.get(base_os).get(\"User\") if not master_ip:", "fail_on_error is true.\"\"\" if fail_on_error: sys.exit(\"ERROR: {0}\".format(message)) else: print(\"ERROR: {0}\".format(message))", "instance.get(\"PrivateIpAddress\") state = instance.get(\"State\").get(\"Name\") if state != \"running\" or ip_address", "cfn_client).get(\"StackStatus\") resource_status = \"\" while status == \"CREATE_IN_PROGRESS\": status =", "compressed archive. \"\"\" file_out = BytesIO() with zipfile.ZipFile(file_out, \"w\", zipfile.ZIP_DEFLATED)", "\"ClusterUser\") else: # Stack is in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE", "import zipfile from io import BytesIO import boto3 import pkg_resources", "verify_stack_creation(stack_name, cfn_client): \"\"\" Wait for the stack creation to be", "and dirs rooted in path. The archive is created in", "stderr if fail_on_error is true.\"\"\" if fail_on_error: sys.exit(\"ERROR: {0}\".format(message)) else:", "files are uploaded :param root: root directory containing the resources", "EFS file system id. :param efs_fs_id: EFS file system Id", "is returned by the function. 
:param path: directory containing the", ":param stack_name: the CFN Stack name :param cfn_client: boto3 cloudformation", "\"aws\" def paginate_boto3(method, **kwargs): \"\"\" Return a generator for a", "json import logging import os import sys import time import", "{0}\".format(stack_status)) elif stack_status in valid_status: outputs = stack_result.get(\"Outputs\") master_ip =", "try: alinux_ami_id = ( boto3.client(\"ssm\") .get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\") .get(\"Parameters\")[0] .get(\"Value\") ) except", "= boto3.resource(\"s3\").Bucket(bucket_name) for res in os.listdir(root): if os.path.isdir(os.path.join(root, res)): bucket.upload_fileobj(zip_dir(os.path.join(root,", "files to $bucket_name/$dir_name/artifacts.zip. All files contained in root dir will", ":return: json object containing all the attributes supported by feature", "alinux_ami_id def list_ec2_instance_types(): \"\"\"Return a list of all the instance", "together with all stored objects. :param bucket_name: name of the", "CloudFormation client to use to verify stack status :return: True", "for root, _, files in os.walk(path): for file in files:", "region: AWS Region :param file_name the object name to get", "# # Licensed under the Apache License, Version 2.0 (the", "= instance.get(\"PrivateIpAddress\") state = instance.get(\"State\").get(\"Name\") if state != \"running\" or", "on import json import logging import os import sys import", "the creation was successful, false otherwise. \"\"\" status = get_stack(stack_name,", "not running. 
Can't SSH\") instance = ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0] ip_address = instance.get(\"PublicIpAddress\")", "kwargs: arguments to method :return: generator with boto3 results \"\"\"", "\"running\" or ip_address is None: error(\"MasterServer: %s\\nCannot get ip address.\",", "master_ip = _get_master_server_ip(stack_name) template = cfn.get_template(StackName=stack_name) mappings = template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\") base_os", "\"\"\" try: instances = _get_json_from_s3(region, \"instances/instances.json\") vcpus = int(instances[instance_type][\"vcpus\"]) except", "def zip_dir(path): \"\"\" Create a zip archive containing all files", "return cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] except (ClientError, IndexError) as e: error(e.response.get(\"Error\").get(\"Message\")) def verify_stack_creation(stack_name,", "path: directory containing the resources to archive. :return file handler", "os.path.isdir(os.path.join(root, res)): bucket.upload_fileobj(zip_dir(os.path.join(root, res)), \"%s/artifacts.zip\" % res) elif os.path.isfile(os.path.join(root, res)):", "\"\"\" status = get_stack(stack_name, cfn_client).get(\"StackStatus\") resource_status = \"\" while status", "successful, false otherwise. 
\"\"\" status = get_stack(stack_name, cfn_client).get(\"StackStatus\") resource_status =", "stack_status in invalid_status: error(\"Unable to retrieve master_ip and username for", "res)), \"%s/artifacts.zip\" % res) elif os.path.isfile(os.path.join(root, res)): bucket.upload_file(os.path.join(root, res), res)", "error message and Raise SystemExit exception to the stderr if", "it manually.\" % bucket_name) def zip_dir(path): \"\"\" Create a zip", "last status update in the logs if resource_status != \"\":", "for file in files: ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path)) file_out.seek(0)", "supported_features def get_instance_vcpus(region, instance_type): \"\"\" Get number of vcpus for", "%s %s %s\", event.get(\"ResourceType\"), event.get(\"LogicalResourceId\"), event.get(\"ResourceStatusReason\"), ) return False return", "config: Config object :return private/public ip address \"\"\" ec2 =", "the scheduler for which we want to know the supported", "= boto3.client(\"cloudformation\") return cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] except (ClientError, IndexError) as e: error(e.response.get(\"Error\").get(\"Message\"))", "Upload to the specified S3 bucket the content of the", "in root dir will be uploaded to $bucket_name. :param bucket_name:", "over an arbitrary number of responses. 
:param method: boto3 method", "{0}.\\n{1}\".format( subnet_id, e.response.get(\"Error\").get(\"Message\") ) ) return avail_zone def get_latest_alinux_ami_id(): \"\"\"Get", "AWS Region :param feature: the feature to search for, i.e.", "\"\"\" ec2 = boto3.client(\"ec2\") master_id = get_master_server_id(stack_name) if not master_id:", "\"efa\": { \"instances\": [\"c5n.18xlarge\", \"p3dn.24xlarge\", \"i3en.24xlarge\"], \"baseos\": [\"alinux\", \"centos7\"], \"schedulers\":", "IndexError) as e: error(e.response.get(\"Error\").get(\"Message\")) def verify_stack_creation(stack_name, cfn_client): \"\"\" Wait for", "= get_stack_output_value(outputs, \"ClusterUser\") else: # Stack is in CREATING, CREATED_FAILED,", "the stderr if fail_on_error is true.\"\"\" if fail_on_error: sys.exit(\"ERROR: {0}\".format(message))", "aws region \"\"\" s3_client = boto3.client(\"s3\") \"\"\" :type : pyboto3.s3", "else: # Stack is in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but", "KIND, express or implied. See the License for the specific", "file handler is returned by the function. :param path: directory", "a warning message.\"\"\" print(\"WARNING: {0}\".format(message)) def error(message, fail_on_error=True): \"\"\"Print an", "creation fails. :param stack_name: the stack name that we should", "if o.get(\"OutputKey\") == output_key), None) def get_stack(stack_name, cfn_client=None): \"\"\" Get", "file handler pointing to the compressed archive. \"\"\" file_out =", "CONDITIONS OF ANY KIND, express or implied. See the License", "AWS_DEFAULT_REGION set in the environment.\"\"\" return \"aws-us-gov\" if get_region().startswith(\"us-gov\") else", "stored objects. 
:param bucket_name: name of the S3 bucket to", "Stack data type \"\"\" try: if not cfn_client: cfn_client =", "\"CREATE_FAILED\": LOGGER.info( \" - %s %s %s\", event.get(\"ResourceType\"), event.get(\"LogicalResourceId\"), event.get(\"ResourceStatusReason\"),", "by the region.\"\"\" return boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum def get_master_server_id(stack_name): \"\"\"Return the physical", "\"\"\"Get partition for the AWS_DEFAULT_REGION set in the environment.\"\"\" return", "= [\"CREATE_COMPLETE\", \"UPDATE_COMPLETE\", \"UPDATE_ROLLBACK_COMPLETE\"] invalid_status = [\"DELETE_COMPLETE\", \"DELETE_IN_PROGRESS\"] if stack_status", "is probably a bug on our end. \" \"Please submit", "to $bucket_name/$dir_name/artifacts.zip. All files contained in root dir will be", "search for, i.e. \"efa\" \"awsbatch\" :return: json object containing all", "\"NONE\") return param_value.strip() def get_efs_mount_target_id(efs_fs_id, avail_zone): \"\"\" Search for a", "the os supported by parallelcluster for the specific scheduler. :param", "%s\", event.get(\"ResourceType\"), event.get(\"LogicalResourceId\"), event.get(\"ResourceStatusReason\"), ) return False return True def", "\"\"\" bucket = boto3.resource(\"s3\").Bucket(bucket_name) for res in os.listdir(root): if os.path.isdir(os.path.join(root,", "be found or the pricing file cannot be retrieved/parsed \"\"\"", "Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.", "sys.stdout.flush() time.sleep(5) # print the last status update in the", "True def get_templates_bucket_path(): \"\"\"Return a string containing the path of", "warning message.\"\"\" print(\"WARNING: {0}\".format(message)) def error(message, fail_on_error=True): \"\"\"Print an error", "get_cfn_param(stack_result.get(\"Parameters\"), \"BaseOS\") username = mappings.get(base_os).get(\"User\") if not master_ip: error(\"Failed to", "output_key): \"\"\" Get output value from Cloudformation Stack Output. 
:param", "s3_suffix = \".cn\" if region.startswith(\"cn\") else \"\" return \"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format( REGION=region,", "{ \"instances\": [\"c5n.18xlarge\", \"p3dn.24xlarge\", \"i3en.24xlarge\"], \"baseos\": [\"alinux\", \"centos7\"], \"schedulers\": [\"sge\",", "true.\"\"\" if fail_on_error: sys.exit(\"ERROR: {0}\".format(message)) else: print(\"ERROR: {0}\".format(message)) def get_cfn_param(params,", "return file_out def upload_resources_artifacts(bucket_name, root): \"\"\" Upload to the specified", "json.loads(file_contents) def get_supported_features(region, feature): \"\"\" Get a json object containing", "vcpus or -1 if the instance type cannot be found", "none) and parse content as json. :param region: AWS Region", "strings of the supported os \"\"\" return \"alinux\" if scheduler", "\"\"\" return \"sge\", \"torque\", \"slurm\", \"awsbatch\" def get_stack_output_value(stack_outputs, output_key): \"\"\"", "the stack creation to be completed and notify if the", "is running master_ip = _get_master_server_ip(stack_name) template = cfn.get_template(StackName=stack_name) mappings =", "return \"sge\", \"torque\", \"slurm\", \"awsbatch\" def get_stack_output_value(stack_outputs, output_key): \"\"\" Get", "already exists\") def delete_s3_bucket(bucket_name): \"\"\" Delete an S3 bucket together", "{0}\".format(message)) else: print(\"ERROR: {0}\".format(message)) def get_cfn_param(params, key_name): \"\"\" Get parameter", "in valid_status: outputs = stack_result.get(\"Outputs\") master_ip = get_stack_output_value(outputs, \"MasterPublicIP\") or", "valid_status = [\"CREATE_COMPLETE\", \"UPDATE_COMPLETE\", \"UPDATE_ROLLBACK_COMPLETE\"] invalid_status = [\"DELETE_COMPLETE\", \"DELETE_IN_PROGRESS\"] if", "file_out = BytesIO() with zipfile.ZipFile(file_out, \"w\", zipfile.ZIP_DEFLATED) as ziph: for", "key_name): \"\"\" Get parameter value from Cloudformation Stack Parameters. 
:param", "contained in root dir will be uploaded as zip files", "content as json. :param region: AWS Region :param file_name the", "zipfile.ZIP_DEFLATED) as ziph: for root, _, files in os.walk(path): for", "method :param kwargs: arguments to method :return: generator with boto3", "= next((i.get(\"ParameterValue\") for i in params if i.get(\"ParameterKey\") == key_name),", "state.upper()) return ip_address def get_master_ip_and_username(cluster_name): cfn = boto3.client(\"cloudformation\") try: stack_name", "version of the installed aws-parallelcluster package.\"\"\" return pkg_resources.get_distribution(\"aws-parallelcluster\").version def check_if_latest_version():", "for mount_target in mount_targets.get(\"MountTargets\"): # Check to see if there", "feature to search for, i.e. \"efa\" \"awsbatch\" :return: json object", "pass except ClientError: print(\"Failed to delete bucket %s. Please delete", "fail_on_error: sys.exit(\"ERROR: {0}\".format(message)) else: print(\"ERROR: {0}\".format(message)) def get_cfn_param(params, key_name): \"\"\"", "key_name), \"NONE\") return param_value.strip() def get_efs_mount_target_id(efs_fs_id, avail_zone): \"\"\" Search for", "import json import logging import os import sys import time", "stack name that we should verify :param cfn_client: the CloudFormation", "error(\"Unable to retrieve Amazon Linux AMI id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\"))) return alinux_ami_id def", ":type : pyboto3.s3 \"\"\" try: if region != \"us-east-1\": s3_client.create_bucket(Bucket=bucket_name,", ": pyboto3.s3 \"\"\" try: if region != \"us-east-1\": s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={\"LocationConstraint\":", "80 ) sys.stdout.write(\"\\r%s\" % resource_status) sys.stdout.flush() time.sleep(5) # print the", "cfn_client): \"\"\" Wait for the stack creation to be completed", "CreateBucketConfiguration={\"LocationConstraint\": region}) else: s3_client.create_bucket(Bucket=bucket_name) except 
s3_client.exceptions.BucketAlreadyOwnedByYou: print(\"Bucket already exists\") def", "efs_fs_id: EFS file system Id :param avail_zone: Availability zone to", "region}) else: s3_client.create_bucket(Bucket=bucket_name) except s3_client.exceptions.BucketAlreadyOwnedByYou: print(\"Bucket already exists\") def delete_s3_bucket(bucket_name):", "avail_zone = None try: avail_zone = ( boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\") ) except", "os import sys import time import urllib.request import zipfile from", "def upload_resources_artifacts(bucket_name, root): \"\"\" Upload to the specified S3 bucket", "server, or [] if no master server.\"\"\" try: resources =", "json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[ \"info\" ][\"version\"] if get_installed_version() < latest: print(\"Info: There is", "False return True def get_templates_bucket_path(): \"\"\"Return a string containing the", "cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] stack_status = stack_result.get(\"StackStatus\") valid_status = [\"CREATE_COMPLETE\", \"UPDATE_COMPLETE\", \"UPDATE_ROLLBACK_COMPLETE\"] invalid_status", "the supported os \"\"\" return \"alinux\" if scheduler == \"awsbatch\"", "to decode the file content \"\"\" bucket_name = \"{0}-aws-parallelcluster\".format(region) file_contents", "next((o.get(\"OutputValue\") for o in stack_outputs if o.get(\"OutputKey\") == output_key), None)", "system id. :param efs_fs_id: EFS file system Id :param avail_zone:", "if there is an existing mt in the az of", "= boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id) for mount_target in mount_targets.get(\"MountTargets\"): # Check to see", ") return avail_zone def get_latest_alinux_ami_id(): \"\"\"Get latest alinux ami id.\"\"\"", "OR CONDITIONS OF ANY KIND, express or implied. 
See the", "if isinstance(e, ClientError): code = e.response.get(\"Error\").get(\"Code\") if code == \"InvalidAccessKeyId\":", "of the License is located at # # http://aws.amazon.com/apache2.0/ #", "\" \"Please submit an issue {1}\".format(feature, PCLUSTER_ISSUES_LINK) ) return supported_features", "AMI id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\"))) return alinux_ami_id def list_ec2_instance_types(): \"\"\"Return a list of", "sys.stdout.write(\"\\r%s\" % resource_status) sys.stdout.flush() time.sleep(5) # print the last status", "Return a tuple of the scheduler supported by parallelcluster. :return:", "output exists, otherwise None \"\"\" return next((o.get(\"OutputValue\") for o in", "get_installed_version() < latest: print(\"Info: There is a newer version %s", "package version is the latest one.\"\"\" try: latest = json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[", "archive. \"\"\" file_out = BytesIO() with zipfile.ZipFile(file_out, \"w\", zipfile.ZIP_DEFLATED) as", "master_ip and username for a stack in the status: {0}\".format(stack_status))", "params: Cloudformation Stack Parameters :param key_name: Parameter Key :return: ParameterValue", "\"slurm\", \"awsbatch\" def get_stack_output_value(stack_outputs, output_key): \"\"\" Get output value from", "mappings.get(base_os).get(\"User\") if not master_ip: error(\"Failed to get cluster {0} ip.\".format(cluster_name))", "boto3 call, this allows pagination over an arbitrary number of", ":param stack_outputs: Cloudformation Stack Outputs :param output_key: Output Key :return:", "data type \"\"\" try: if not cfn_client: cfn_client = boto3.client(\"cloudformation\")", "delete_s3_bucket(bucket_name): \"\"\" Delete an S3 bucket together with all stored", "end. 
\" \"Please submit an issue {1}\".format(feature, PCLUSTER_ISSUES_LINK) ) return", "Availability zone to verify :return: the mount_target_id or None \"\"\"", "if not master_id: error(\"MasterServer not running. Can't SSH\") instance =", "or ROLLBACK_COMPLETE but MasterServer is running master_ip = _get_master_server_ip(stack_name) template", "supported by parallelcluster. :return: a tuple of strings of the", "instance.get(\"State\").get(\"Name\") if state != \"running\" or ip_address is None: error(\"MasterServer:", "Stack is in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer is", "warn(message): \"\"\"Print a warning message.\"\"\" print(\"WARNING: {0}\".format(message)) def error(message, fail_on_error=True):", "tuple of the os supported by parallelcluster for the specific", "\" - %s %s %s\", event.get(\"ResourceType\"), event.get(\"LogicalResourceId\"), event.get(\"ResourceStatusReason\"), ) return", "This is probably a bug on our end. \" \"Please", "unable to download the file :raises ValueError if unable to", "import absolute_import, print_function # isort:skip from future import standard_library #", "_, files in os.walk(path): for file in files: ziph.write(os.path.join(root, file),", "files in os.walk(path): for file in files: ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root,", "try: bucket = boto3.resource(\"s3\").Bucket(bucket_name) bucket.objects.all().delete() bucket.delete() except boto3.client(\"s3\").exceptions.NoSuchBucket: pass except", "= cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0] resource_status = (\"Status: %s - %s\" % (events.get(\"LogicalResourceId\"),", "print_function # isort:skip from future import standard_library # isort:skip standard_library.install_aliases()", "the License for the specific language governing permissions and #", "paginate_boto3(method, **kwargs): \"\"\" Return a generator for a boto3 call,", "for result in page: yield result def 
create_s3_bucket(bucket_name, region): \"\"\"", "unable to decode the file content \"\"\" bucket_name = \"{0}-aws-parallelcluster\".format(region)", "file accompanying this file. This file is distributed on an", "valid_status: outputs = stack_result.get(\"Outputs\") master_ip = get_stack_output_value(outputs, \"MasterPublicIP\") or _get_master_server_ip(stack_name)", "template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\") base_os = get_cfn_param(stack_result.get(\"Parameters\"), \"BaseOS\") username = mappings.get(base_os).get(\"User\") if not", "# http://aws.amazon.com/apache2.0/ # # or in the \"LICENSE.txt\" file accompanying", "governing permissions and # limitations under the License. # fmt:", "in os.listdir(root): if os.path.isdir(os.path.join(root, res)): bucket.upload_fileobj(zip_dir(os.path.join(root, res)), \"%s/artifacts.zip\" % res)", "message and Raise SystemExit exception to the stderr if fail_on_error", "_get_master_server_ip(stack_name) username = get_stack_output_value(outputs, \"ClusterUser\") else: # Stack is in", "call, this allows pagination over an arbitrary number of responses.", "pyboto3.s3 \"\"\" try: if region != \"us-east-1\": s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={\"LocationConstraint\": region})", "Stack. 
:param stack_name: the CFN Stack name :param cfn_client: boto3", "ParallelCluster available.\" % latest) except Exception: pass def warn(message): \"\"\"Print", "pkg_resources.get_distribution(\"aws-parallelcluster\").version def check_if_latest_version(): \"\"\"Check if the current package version is", "get_stack_output_value(outputs, \"MasterPublicIP\") or _get_master_server_ip(stack_name) username = get_stack_output_value(outputs, \"ClusterUser\") else: #", "= \"\" while status == \"CREATE_IN_PROGRESS\": status = get_stack(stack_name, cfn_client).get(\"StackStatus\")", "%s - %s\" % (events.get(\"LogicalResourceId\"), events.get(\"ResourceStatus\"))).ljust( 80 ) sys.stdout.write(\"\\r%s\" %", "to be completed and notify if the stack creation fails.", "objects. :param bucket_name: name of the S3 bucket to delete", "of bucket.\"\"\" region = get_region() s3_suffix = \".cn\" if region.startswith(\"cn\")", "print(\"Bucket already exists\") def delete_s3_bucket(bucket_name): \"\"\" Delete an S3 bucket", "!= \"\": LOGGER.debug(resource_status) if status != \"CREATE_COMPLETE\": LOGGER.critical(\"\\nCluster creation failed.", "error(\"MasterServer: %s\\nCannot get ip address.\", state.upper()) return ip_address def get_master_ip_and_username(cluster_name):", "except boto3.client(\"s3\").exceptions.NoSuchBucket: pass except ClientError: print(\"Failed to delete bucket %s.", "REGION=region, S3_SUFFIX=s3_suffix ) def get_installed_version(): \"\"\"Get the version of the", "the stack mount_target_subnet = mount_target.get(\"SubnetId\") if avail_zone == get_avail_zone(mount_target_subnet): mount_target_id", "the environment.\"\"\" return os.environ.get(\"AWS_DEFAULT_REGION\") def get_partition(): \"\"\"Get partition for the", "LOGGER.info( \" - %s %s %s\", event.get(\"ResourceType\"), event.get(\"LogicalResourceId\"), event.get(\"ResourceStatusReason\"), )", "\"alinux\" if scheduler == \"awsbatch\" else \"alinux\", \"centos6\", \"centos7\", \"ubuntu1604\",", 
"PCLUSTER_STACK_PREFIX = \"parallelcluster-\" PCLUSTER_ISSUES_LINK = \"https://github.com/aws/aws-parallelcluster/issues\" def get_stack_name(cluster_name): return PCLUSTER_STACK_PREFIX", "of the supported os \"\"\" return \"alinux\" if scheduler ==", "root: root directory containing the resources to upload. \"\"\" bucket", "%s %s\", event.get(\"ResourceType\"), event.get(\"LogicalResourceId\"), event.get(\"ResourceStatusReason\"), ) return False return True", "if that output exists, otherwise None \"\"\" return next((o.get(\"OutputValue\") for", "S3 bucket to create :param region: aws region \"\"\" s3_client", "verify :return: the mount_target_id or None \"\"\" mount_target_id = None", "username.\".format(cluster_name)) except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) return master_ip, username def", "return pkg_resources.get_distribution(\"aws-parallelcluster\").version def check_if_latest_version(): \"\"\"Check if the current package version", "Can't SSH\") instance = ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0] ip_address = instance.get(\"PublicIpAddress\") if ip_address", "where files are uploaded :param root: root directory containing the", "= get_stack(stack_name, cfn_client).get(\"StackStatus\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0] resource_status = (\"Status: %s", "\"\"\"Get latest alinux ami id.\"\"\" try: alinux_ami_id = ( boto3.client(\"ssm\")", "PCLUSTER_STACK_PREFIX + cluster_name def get_region(): \"\"\"Get AWS_DEFAULT_REGION from the environment.\"\"\"", "return ip_address def get_master_ip_and_username(cluster_name): cfn = boto3.client(\"cloudformation\") try: stack_name =", "cfn_client=None): \"\"\" Get the output for a DescribeStacks action for", "IS\" BASIS, WITHOUT WARRANTIES # OR CONDITIONS OF ANY KIND,", "\".cn\" if region.startswith(\"cn\") else \"\" return 
\"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format( REGION=region, S3_SUFFIX=s3_suffix )", "\"\"\" Upload to the specified S3 bucket the content of", "path of bucket.\"\"\" region = get_region() s3_suffix = \".cn\" if", "server.\"\"\" try: resources = boto3.client(\"cloudformation\").describe_stack_resource( StackName=stack_name, LogicalResourceId=\"MasterServer\" ) return resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\")", "false otherwise. \"\"\" status = get_stack(stack_name, cfn_client).get(\"StackStatus\") resource_status = \"\"", "otherwise. \"\"\" status = get_stack(stack_name, cfn_client).get(\"StackStatus\") resource_status = \"\" while", "the scheduler supported by parallelcluster. :return: a tuple of strings", "number of vcpus or -1 if the instance type cannot", "a new S3 bucket. :param bucket_name: name of the S3", "boto3.resource(\"s3\").Bucket(bucket_name) for res in os.listdir(root): if os.path.isdir(os.path.join(root, res)): bucket.upload_fileobj(zip_dir(os.path.join(root, res)),", "instance = ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0] ip_address = instance.get(\"PublicIpAddress\") if ip_address is None:", "retrieve master_ip and username for a stack in the status:", "= boto3.client(\"cloudformation\").describe_stack_resource( StackName=stack_name, LogicalResourceId=\"MasterServer\" ) return resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\") except ClientError as", "All files contained in root dir will be uploaded to", "the master server, or [] if no master server.\"\"\" try:", "for the given EFS file system id. :param efs_fs_id: EFS", ") return supported_features def get_instance_vcpus(region, instance_type): \"\"\" Get number of", "id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\"))) return alinux_ami_id def list_ec2_instance_types(): \"\"\"Return a list of all", "Get the IP Address of the MasterServer. 
:param stack_name: The", "the cloudformation stack :param config: Config object :return private/public ip", "bucket.upload_fileobj(zip_dir(os.path.join(root, res)), \"%s/artifacts.zip\" % res) elif os.path.isfile(os.path.join(root, res)): bucket.upload_file(os.path.join(root, res),", "\"BaseOS\") username = mappings.get(base_os).get(\"User\") if not master_ip: error(\"Failed to get", "should verify :param cfn_client: the CloudFormation client to use to", "== \"InvalidAccessKeyId\": error(e.response.get(\"Error\").get(\"Message\")) error( \"Failed validate {0}. This is probably", "create :param region: aws region \"\"\" s3_client = boto3.client(\"s3\") \"\"\"", "\"MasterPublicIP\") or _get_master_server_ip(stack_name) username = get_stack_output_value(outputs, \"ClusterUser\") else: # Stack", "Raise SystemExit exception to the stderr if fail_on_error is true.\"\"\"", "will be uploaded as zip files to $bucket_name/$dir_name/artifacts.zip. All files", "\"parallelcluster-\" PCLUSTER_ISSUES_LINK = \"https://github.com/aws/aws-parallelcluster/issues\" def get_stack_name(cluster_name): return PCLUSTER_STACK_PREFIX + cluster_name", "= e.response.get(\"Error\").get(\"Code\") if code == \"InvalidAccessKeyId\": error(e.response.get(\"Error\").get(\"Message\")) error( \"Failed validate", "the resources to archive. :return file handler pointing to the", "def get_supported_os(scheduler): \"\"\" Return a tuple of the os supported", "physical id of the master server, or [] if no", "a file handler is returned by the function. 
:param path:", "from the environment.\"\"\" return os.environ.get(\"AWS_DEFAULT_REGION\") def get_partition(): \"\"\"Get partition for", "BytesIO() with zipfile.ZipFile(file_out, \"w\", zipfile.ZIP_DEFLATED) as ziph: for root, _,", "master_ip = get_stack_output_value(outputs, \"MasterPublicIP\") or _get_master_server_ip(stack_name) username = get_stack_output_value(outputs, \"ClusterUser\")", "is None: ip_address = instance.get(\"PrivateIpAddress\") state = instance.get(\"State\").get(\"Name\") if state", "Please delete it manually.\" % bucket_name) def zip_dir(path): \"\"\" Create", "the object name to get :return: a json object representing", ":param cfn_client: the CloudFormation client to use to verify stack", "an \"AS IS\" BASIS, WITHOUT WARRANTIES # OR CONDITIONS OF", "parse content as json. :param region: AWS Region :param file_name", "\"\"\" Get pricing file (if none) and parse content as", "return alinux_ami_id def list_ec2_instance_types(): \"\"\"Return a list of all the", "Failed events:\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\") for event in events: if", "mount_target_id = None if efs_fs_id: mount_targets = boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id) for mount_target", "master_id = get_master_server_id(stack_name) if not master_id: error(\"MasterServer not running. 
Can't", "try: instances = _get_json_from_s3(region, \"instances/instances.json\") vcpus = int(instances[instance_type][\"vcpus\"]) except (KeyError,", "template = cfn.get_template(StackName=stack_name) mappings = template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\") base_os = get_cfn_param(stack_result.get(\"Parameters\"), \"BaseOS\")", "event.get(\"ResourceStatusReason\"), ) return False return True def get_templates_bucket_path(): \"\"\"Return a", "S3_SUFFIX=s3_suffix ) def get_installed_version(): \"\"\"Get the version of the installed", "know the supported os :return: a tuple of strings of", "instance.get(\"PublicIpAddress\") if ip_address is None: ip_address = instance.get(\"PrivateIpAddress\") state =", "% bucket_name) def zip_dir(path): \"\"\" Create a zip archive containing", "import pkg_resources from botocore.exceptions import ClientError LOGGER = logging.getLogger(__name__) PCLUSTER_STACK_PREFIX", "file content \"\"\" bucket_name = \"{0}-aws-parallelcluster\".format(region) file_contents = boto3.resource(\"s3\").Object(bucket_name, file_name).get()[\"Body\"].read().decode(\"utf-8\")", "logging import os import sys import time import urllib.request import", "package.\"\"\" return pkg_resources.get_distribution(\"aws-parallelcluster\").version def check_if_latest_version(): \"\"\"Check if the current package", "boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\") ) except ClientError as e: LOGGER.debug( \"Unable to detect", "the path of bucket.\"\"\" region = get_region() s3_suffix = \".cn\"", "is in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer is running", "Stack Outputs :param output_key: Output Key :return: OutputValue if that", "= \"parallelcluster-\" PCLUSTER_ISSUES_LINK = \"https://github.com/aws/aws-parallelcluster/issues\" def get_stack_name(cluster_name): return PCLUSTER_STACK_PREFIX +", "Cloudformation Stack Parameters. 
:param params: Cloudformation Stack Parameters :param key_name:", "in the \"LICENSE.txt\" file accompanying this file. This file is", "Address of the MasterServer. :param stack_name: The name of the", "print(\"Failed to delete bucket %s. Please delete it manually.\" %", "in compliance with # the License. A copy of the", "if efs_fs_id: mount_targets = boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id) for mount_target in mount_targets.get(\"MountTargets\"): #", "== output_key), None) def get_stack(stack_name, cfn_client=None): \"\"\" Get the output", "e: error(\"Unable to retrieve Amazon Linux AMI id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\"))) return alinux_ami_id", "to search for, i.e. \"efa\" \"awsbatch\" :return: json object containing", "pointing to the compressed archive. \"\"\" file_out = BytesIO() with", "Check to see if there is an existing mt in", "(the \"License\"). You may not use this file except in", "\"License\"). You may not use this file except in compliance", "function. :param path: directory containing the resources to archive. :return", "try: avail_zone = ( boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\") ) except ClientError as e:", "\"https://github.com/aws/aws-parallelcluster/issues\" def get_stack_name(cluster_name): return PCLUSTER_STACK_PREFIX + cluster_name def get_region(): \"\"\"Get", "events.get(\"ResourceStatus\"))).ljust( 80 ) sys.stdout.write(\"\\r%s\" % resource_status) sys.stdout.flush() time.sleep(5) # print", "result in page: yield result def create_s3_bucket(bucket_name, region): \"\"\" Create", "allows pagination over an arbitrary number of responses. 
:param method:", "stack :param config: Config object :return private/public ip address \"\"\"", "error(e.response.get(\"Error\").get(\"Message\")) def verify_stack_creation(stack_name, cfn_client): \"\"\" Wait for the stack creation", "boto3 method :param kwargs: arguments to method :return: generator with", "{0}\".format(message)) def error(message, fail_on_error=True): \"\"\"Print an error message and Raise", "S3 bucket. :param bucket_name: name of the S3 bucket to", "events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0] resource_status = (\"Status: %s - %s\" %", "cfn_client: cfn_client = boto3.client(\"cloudformation\") return cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] except (ClientError, IndexError) as", "latest one.\"\"\" try: latest = json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[ \"info\" ][\"version\"] if get_installed_version()", "= get_stack(stack_name, cfn_client).get(\"StackStatus\") resource_status = \"\" while status == \"CREATE_IN_PROGRESS\":", "\"\"\" try: features = _get_json_from_s3(region, \"features/feature_whitelist.json\") supported_features = features.get(\"Features\").get(feature) except", "boto3.resource(\"s3\").Bucket(bucket_name) bucket.objects.all().delete() bucket.delete() except boto3.client(\"s3\").exceptions.NoSuchBucket: pass except ClientError: print(\"Failed to", "$bucket_name. 
:param bucket_name: name of the S3 bucket where files", "None \"\"\" return next((o.get(\"OutputValue\") for o in stack_outputs if o.get(\"OutputKey\")", "Outputs :param output_key: Output Key :return: OutputValue if that output", "s3_client.create_bucket(Bucket=bucket_name) except s3_client.exceptions.BucketAlreadyOwnedByYou: print(\"Bucket already exists\") def delete_s3_bucket(bucket_name): \"\"\" Delete", "% resource_status) sys.stdout.flush() time.sleep(5) # print the last status update", "\"baseos\": [\"alinux\", \"centos7\"], \"schedulers\": [\"sge\", \"slurm\", \"torque\"] }, \"batch\": {", "in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer is running master_ip", "PCLUSTER_ISSUES_LINK = \"https://github.com/aws/aws-parallelcluster/issues\" def get_stack_name(cluster_name): return PCLUSTER_STACK_PREFIX + cluster_name def", "number of responses. :param method: boto3 method :param kwargs: arguments", "start=path)) file_out.seek(0) return file_out def upload_resources_artifacts(bucket_name, root): \"\"\" Upload to", "%s\\nCannot get ip address.\", state.upper()) return ip_address def get_master_ip_and_username(cluster_name): cfn", "update in the logs if resource_status != \"\": LOGGER.debug(resource_status) if", "event.get(\"ResourceStatus\") == \"CREATE_FAILED\": LOGGER.info( \" - %s %s %s\", event.get(\"ResourceType\"),", "may not use this file except in compliance with #", "\"\"\" return \"alinux\" if scheduler == \"awsbatch\" else \"alinux\", \"centos6\",", "\"torque\", \"slurm\", \"awsbatch\" def get_stack_output_value(stack_outputs, output_key): \"\"\" Get output value", "\"\"\" return next((o.get(\"OutputValue\") for o in stack_outputs if o.get(\"OutputKey\") ==", "\"ubuntu1604\", \"ubuntu1804\" def get_supported_schedulers(): \"\"\" Return a tuple of the", "stack in the status: {0}\".format(stack_status)) elif stack_status in valid_status: outputs", "as e: if isinstance(e, ClientError): code = e.response.get(\"Error\").get(\"Code\") if 
code", ":param scheduler: the scheduler for which we want to know", "archive. :return file handler pointing to the compressed archive. \"\"\"", "file system id. :param efs_fs_id: EFS file system Id :param", "environment.\"\"\" return \"aws-us-gov\" if get_region().startswith(\"us-gov\") else \"aws\" def paginate_boto3(method, **kwargs):", "supported by parallelcluster for the specific scheduler. :param scheduler: the", "Target Id in given availability zone for the given EFS", "Parameters :param key_name: Parameter Key :return: ParameterValue if that parameter", "ClientError, KeyError) as e: if isinstance(e, ClientError): code = e.response.get(\"Error\").get(\"Code\")", "= [\"DELETE_COMPLETE\", \"DELETE_IN_PROGRESS\"] if stack_status in invalid_status: error(\"Unable to retrieve", "else \"aws\" def paginate_boto3(method, **kwargs): \"\"\" Return a generator for", "arbitrary number of responses. :param method: boto3 method :param kwargs:", "= mount_target.get(\"MountTargetId\") return mount_target_id def get_avail_zone(subnet_id): avail_zone = None try:", "S3 bucket together with all stored objects. :param bucket_name: name", "bucket to delete \"\"\" try: bucket = boto3.resource(\"s3\").Bucket(bucket_name) bucket.objects.all().delete() bucket.delete()", "\"\"\" Create a new S3 bucket. :param bucket_name: name of", "\"\"\" Get the IP Address of the MasterServer. :param stack_name:", "otherwise None \"\"\" param_value = next((i.get(\"ParameterValue\") for i in params", "parallelcluster for the specific scheduler. 
:param scheduler: the scheduler for", "object representing the file content :raises ClientError if unable to", "return True def get_templates_bucket_path(): \"\"\"Return a string containing the path", "Stack name :param cfn_client: boto3 cloudformation client :return: the Stack", "CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer is running master_ip = _get_master_server_ip(stack_name)", "get_supported_os(scheduler): \"\"\" Return a tuple of the os supported by", "want to know the supported os :return: a tuple of", "e: error(e.response.get(\"Error\").get(\"Message\")) return master_ip, username def get_cli_log_file(): return os.path.expanduser(os.path.join(\"~\", \".parallelcluster\",", "ROLLBACK_COMPLETE but MasterServer is running master_ip = _get_master_server_ip(stack_name) template =", "containing the resources to upload. \"\"\" bucket = boto3.resource(\"s3\").Bucket(bucket_name) for", "\"batch\": { \"instances\": [\"r3.8xlarge\", ..., \"m5.4xlarge\"] } } } :param", "stack_outputs: Cloudformation Stack Outputs :param output_key: Output Key :return: OutputValue", "but MasterServer is running master_ip = _get_master_server_ip(stack_name) template = cfn.get_template(StackName=stack_name)", "key_name: Parameter Key :return: ParameterValue if that parameter exists, otherwise", "os.environ.get(\"AWS_DEFAULT_REGION\") def get_partition(): \"\"\"Get partition for the AWS_DEFAULT_REGION set in", "this file except in compliance with # the License. A", "tuple of the scheduler supported by parallelcluster. :return: a tuple", ") def get_installed_version(): \"\"\"Get the version of the installed aws-parallelcluster", "in the status: {0}\".format(stack_status)) elif stack_status in valid_status: outputs =", "error(e.response.get(\"Error\").get(\"Message\")) def _get_master_server_ip(stack_name): \"\"\" Get the IP Address of the", "be completed and notify if the stack creation fails. 
:param", "= instance.get(\"PublicIpAddress\") if ip_address is None: ip_address = instance.get(\"PrivateIpAddress\") state", "def _get_json_from_s3(region, file_name): \"\"\" Get pricing file (if none) and", "= \"{0}-aws-parallelcluster\".format(region) file_contents = boto3.resource(\"s3\").Object(bucket_name, file_name).get()[\"Body\"].read().decode(\"utf-8\") return json.loads(file_contents) def get_supported_features(region,", "the S3 bucket to delete \"\"\" try: bucket = boto3.resource(\"s3\").Bucket(bucket_name)", "create_s3_bucket(bucket_name, region): \"\"\" Create a new S3 bucket. :param bucket_name:", "for the stack creation to be completed and notify if", "invalid_status: error(\"Unable to retrieve master_ip and username for a stack", "= mount_target.get(\"SubnetId\") if avail_zone == get_avail_zone(mount_target_subnet): mount_target_id = mount_target.get(\"MountTargetId\") return", "username: error(\"Failed to get cluster {0} username.\".format(cluster_name)) except ClientError as", "of the scheduler supported by parallelcluster. :return: a tuple of", "or its affiliates. All Rights Reserved. # # Licensed under", "botocore.exceptions import ClientError LOGGER = logging.getLogger(__name__) PCLUSTER_STACK_PREFIX = \"parallelcluster-\" PCLUSTER_ISSUES_LINK", "= None try: avail_zone = ( boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\") ) except ClientError", "OutputValue if that output exists, otherwise None \"\"\" return next((o.get(\"OutputValue\")", "import ClientError LOGGER = logging.getLogger(__name__) PCLUSTER_STACK_PREFIX = \"parallelcluster-\" PCLUSTER_ISSUES_LINK =", "all the instance types available on EC2, independent by the", "None try: avail_zone = ( boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\") ) except ClientError as", "use to verify stack status :return: True if the creation", "or implied. 
See the License for the specific language governing", "dir will be uploaded as zip files to $bucket_name/$dir_name/artifacts.zip. All", "time import urllib.request import zipfile from io import BytesIO import", "message.\"\"\" print(\"WARNING: {0}\".format(message)) def error(message, fail_on_error=True): \"\"\"Print an error message", "\"\"\" client = method.__self__ paginator = client.get_paginator(method.__name__) for page in", "or None \"\"\" mount_target_id = None if efs_fs_id: mount_targets =", "new S3 bucket. :param bucket_name: name of the S3 bucket", "Config object :return private/public ip address \"\"\" ec2 = boto3.client(\"ec2\")", "subnet_id, e.response.get(\"Error\").get(\"Message\") ) ) return avail_zone def get_latest_alinux_ami_id(): \"\"\"Get latest", "given EFS file system id. :param efs_fs_id: EFS file system", "a tuple of strings of the supported os \"\"\" return", "of the S3 bucket to create :param region: aws region", "scheduler. :param scheduler: the scheduler for which we want to", "the feature to search for, i.e. \"efa\" \"awsbatch\" :return: json", "of the MasterServer. :param stack_name: The name of the cloudformation", "= _get_json_from_s3(region, \"features/feature_whitelist.json\") supported_features = features.get(\"Features\").get(feature) except (ValueError, ClientError, KeyError)", "cluster_name def get_region(): \"\"\"Get AWS_DEFAULT_REGION from the environment.\"\"\" return os.environ.get(\"AWS_DEFAULT_REGION\")", "# fmt: on import json import logging import os import", "instance type. :param region: AWS Region :param instance_type: the instance", "boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum def get_master_server_id(stack_name): \"\"\"Return the physical id of the master", "parameter value from Cloudformation Stack Parameters. 
:param params: Cloudformation Stack", "username for a stack in the status: {0}\".format(stack_status)) elif stack_status", "object containing the attributes supported by a feature, for example.", "there is an existing mt in the az of the", "e: if isinstance(e, ClientError): code = e.response.get(\"Error\").get(\"Code\") if code ==", "a newer version %s of AWS ParallelCluster available.\" % latest)", "except ClientError as e: LOGGER.debug( \"Unable to detect availability zone", "== get_avail_zone(mount_target_subnet): mount_target_id = mount_target.get(\"MountTargetId\") return mount_target_id def get_avail_zone(subnet_id): avail_zone", "containing all files and dirs rooted in path. The archive", "logging.getLogger(__name__) PCLUSTER_STACK_PREFIX = \"parallelcluster-\" PCLUSTER_ISSUES_LINK = \"https://github.com/aws/aws-parallelcluster/issues\" def get_stack_name(cluster_name): return", "of responses. :param method: boto3 method :param kwargs: arguments to", "_get_json_from_s3(region, file_name): \"\"\" Get pricing file (if none) and parse", "%s\" % (events.get(\"LogicalResourceId\"), events.get(\"ResourceStatus\"))).ljust( 80 ) sys.stdout.write(\"\\r%s\" % resource_status) sys.stdout.flush()", "implied. See the License for the specific language governing permissions", "return param_value.strip() def get_efs_mount_target_id(efs_fs_id, avail_zone): \"\"\" Search for a Mount", "\"\"\" Get a json object containing the attributes supported by", ":return: ParameterValue if that parameter exists, otherwise None \"\"\" param_value", "else: print(\"ERROR: {0}\".format(message)) def get_cfn_param(params, key_name): \"\"\" Get parameter value", "\"centos7\", \"ubuntu1604\", \"ubuntu1804\" def get_supported_schedulers(): \"\"\" Return a tuple of", "\"Unable to detect availability zone for subnet {0}.\\n{1}\".format( subnet_id, e.response.get(\"Error\").get(\"Message\")", "\"\"\" Get output value from Cloudformation Stack Output. 
:param stack_outputs:", "the stack name that we should verify :param cfn_client: the", "Linux AMI id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\"))) return alinux_ami_id def list_ec2_instance_types(): \"\"\"Return a list", "our end. \" \"Please submit an issue {1}\".format(feature, PCLUSTER_ISSUES_LINK) )", "dirs contained in root dir will be uploaded as zip", "cfn_client: boto3 cloudformation client :return: the Stack data type \"\"\"", "a string containing the path of bucket.\"\"\" region = get_region()", "if the stack creation fails. :param stack_name: the stack name", "S3 bucket to delete \"\"\" try: bucket = boto3.resource(\"s3\").Bucket(bucket_name) bucket.objects.all().delete()", "exists\") def delete_s3_bucket(bucket_name): \"\"\" Delete an S3 bucket together with", "page: yield result def create_s3_bucket(bucket_name, region): \"\"\" Create a new", "\"CREATE_COMPLETE\": LOGGER.critical(\"\\nCluster creation failed. Failed events:\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\") for", "check_if_latest_version(): \"\"\"Check if the current package version is the latest", "memory and a file handler is returned by the function.", "http://aws.amazon.com/apache2.0/ # # or in the \"LICENSE.txt\" file accompanying this", "standard_library.install_aliases() # fmt: on import json import logging import os", "or in the \"LICENSE.txt\" file accompanying this file. This file", "Get pricing file (if none) and parse content as json.", "all stored objects. :param bucket_name: name of the S3 bucket", "bucket to create :param region: aws region \"\"\" s3_client =", "in given availability zone for the given EFS file system", "this file. This file is distributed on an \"AS IS\"", "Rights Reserved. 
# # Licensed under the Apache License, Version", "if the current package version is the latest one.\"\"\" try:", "specific language governing permissions and # limitations under the License.", "a json object representing the file content :raises ClientError if", "in root dir will be uploaded as zip files to", "the specified S3 bucket the content of the directory rooted", "(events.get(\"LogicalResourceId\"), events.get(\"ResourceStatus\"))).ljust( 80 ) sys.stdout.write(\"\\r%s\" % resource_status) sys.stdout.flush() time.sleep(5) #", "Apache License, Version 2.0 (the \"License\"). You may not use", "the file :raises ValueError if unable to decode the file", "+ cluster_name def get_region(): \"\"\"Get AWS_DEFAULT_REGION from the environment.\"\"\" return", "Get number of vcpus for the given instance type. :param", "\"\"\" Get parameter value from Cloudformation Stack Parameters. :param params:", "the function. :param path: directory containing the resources to archive.", ":param path: directory containing the resources to archive. :return file", "\"ubuntu1804\" def get_supported_schedulers(): \"\"\" Return a tuple of the scheduler", "def get_avail_zone(subnet_id): avail_zone = None try: avail_zone = ( boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\")", "bucket where files are uploaded :param root: root directory containing", "ip_address is None: error(\"MasterServer: %s\\nCannot get ip address.\", state.upper()) return", "the given instance type. 
:param region: AWS Region :param instance_type:", "and Raise SystemExit exception to the stderr if fail_on_error is", "= (\"Status: %s - %s\" % (events.get(\"LogicalResourceId\"), events.get(\"ResourceStatus\"))).ljust( 80 )", "root, _, files in os.walk(path): for file in files: ziph.write(os.path.join(root,", "latest = json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[ \"info\" ][\"version\"] if get_installed_version() < latest: print(\"Info:", "Licensed under the Apache License, Version 2.0 (the \"License\"). You", "\"sge\", \"torque\", \"slurm\", \"awsbatch\" def get_stack_output_value(stack_outputs, output_key): \"\"\" Get output", "bucket together with all stored objects. :param bucket_name: name of", "{ \"instances\": [\"r3.8xlarge\", ..., \"m5.4xlarge\"] } } } :param region:", "\"InvalidAccessKeyId\": error(e.response.get(\"Error\").get(\"Message\")) error( \"Failed validate {0}. This is probably a", "boto3.client(\"ssm\") .get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\") .get(\"Parameters\")[0] .get(\"Value\") ) except ClientError as e: error(\"Unable", "as e: LOGGER.debug( \"Unable to detect availability zone for subnet", "ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path)) file_out.seek(0) return file_out def upload_resources_artifacts(bucket_name,", "\"\"\"Get the version of the installed aws-parallelcluster package.\"\"\" return pkg_resources.get_distribution(\"aws-parallelcluster\").version", "is None: error(\"MasterServer: %s\\nCannot get ip address.\", state.upper()) return ip_address", "\"\"\" Return a tuple of the scheduler supported by parallelcluster.", "ip.\".format(cluster_name)) if not username: error(\"Failed to get cluster {0} username.\".format(cluster_name))", "- %s\" % (events.get(\"LogicalResourceId\"), events.get(\"ResourceStatus\"))).ljust( 80 ) sys.stdout.write(\"\\r%s\" % resource_status)", "params if 
i.get(\"ParameterKey\") == key_name), \"NONE\") return param_value.strip() def get_efs_mount_target_id(efs_fs_id,", "future import standard_library # isort:skip standard_library.install_aliases() # fmt: on import", "on EC2, independent by the region.\"\"\" return boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum def get_master_server_id(stack_name):", "= get_master_server_id(stack_name) if not master_id: error(\"MasterServer not running. Can't SSH\")", "a list of all the instance types available on EC2,", "not use this file except in compliance with # the", "\"\"\"Return the physical id of the master server, or []", "else \"\" return \"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format( REGION=region, S3_SUFFIX=s3_suffix ) def get_installed_version(): \"\"\"Get", "if os.path.isdir(os.path.join(root, res)): bucket.upload_fileobj(zip_dir(os.path.join(root, res)), \"%s/artifacts.zip\" % res) elif os.path.isfile(os.path.join(root,", "supported os :return: a tuple of strings of the supported", "i in params if i.get(\"ParameterKey\") == key_name), \"NONE\") return param_value.strip()", "mount_target_subnet = mount_target.get(\"SubnetId\") if avail_zone == get_avail_zone(mount_target_subnet): mount_target_id = mount_target.get(\"MountTargetId\")", "ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) def _get_master_server_ip(stack_name): \"\"\" Get the IP", "%s. Please delete it manually.\" % bucket_name) def zip_dir(path): \"\"\"", "error(e.response.get(\"Error\").get(\"Message\")) return master_ip, username def get_cli_log_file(): return os.path.expanduser(os.path.join(\"~\", \".parallelcluster\", \"pcluster-cli.log\"))", "output for a DescribeStacks action for the given Stack. :param", "the pricing file cannot be retrieved/parsed \"\"\" try: instances =", "error(e.response.get(\"Error\").get(\"Message\")) error( \"Failed validate {0}. 
This is probably a bug", "param_value.strip() def get_efs_mount_target_id(efs_fs_id, avail_zone): \"\"\" Search for a Mount Target", "subnet {0}.\\n{1}\".format( subnet_id, e.response.get(\"Error\").get(\"Message\") ) ) return avail_zone def get_latest_alinux_ami_id():", "Id in given availability zone for the given EFS file", "to search for. :return: the number of vcpus or -1", "in params if i.get(\"ParameterKey\") == key_name), \"NONE\") return param_value.strip() def", "zip_dir(path): \"\"\" Create a zip archive containing all files and", "the S3 bucket where files are uploaded :param root: root", "get cluster {0} ip.\".format(cluster_name)) if not username: error(\"Failed to get", "root): \"\"\" Upload to the specified S3 bucket the content", "ParameterValue if that parameter exists, otherwise None \"\"\" param_value =", "root dir will be uploaded to $bucket_name. :param bucket_name: name", "type \"\"\" try: if not cfn_client: cfn_client = boto3.client(\"cloudformation\") return", "return \"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format( REGION=region, S3_SUFFIX=s3_suffix ) def get_installed_version(): \"\"\"Get the version", "def get_partition(): \"\"\"Get partition for the AWS_DEFAULT_REGION set in the", "the \"LICENSE.txt\" file accompanying this file. 
This file is distributed", "bucket_name = \"{0}-aws-parallelcluster\".format(region) file_contents = boto3.resource(\"s3\").Object(bucket_name, file_name).get()[\"Body\"].read().decode(\"utf-8\") return json.loads(file_contents) def", "next((i.get(\"ParameterValue\") for i in params if i.get(\"ParameterKey\") == key_name), \"NONE\")", "stack_result.get(\"Outputs\") master_ip = get_stack_output_value(outputs, \"MasterPublicIP\") or _get_master_server_ip(stack_name) username = get_stack_output_value(outputs,", "S3 bucket where files are uploaded :param root: root directory", "for o in stack_outputs if o.get(\"OutputKey\") == output_key), None) def", ":param instance_type: the instance type to search for. :return: the", "alinux_ami_id = ( boto3.client(\"ssm\") .get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\") .get(\"Parameters\")[0] .get(\"Value\") ) except ClientError", "\"schedulers\": [\"sge\", \"slurm\", \"torque\"] }, \"batch\": { \"instances\": [\"r3.8xlarge\", ...,", "and notify if the stack creation fails. :param stack_name: the", "of vcpus for the given instance type. :param region: AWS", "!= \"us-east-1\": s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={\"LocationConstraint\": region}) else: s3_client.create_bucket(Bucket=bucket_name) except s3_client.exceptions.BucketAlreadyOwnedByYou: print(\"Bucket", "if fail_on_error: sys.exit(\"ERROR: {0}\".format(message)) else: print(\"ERROR: {0}\".format(message)) def get_cfn_param(params, key_name):", "BASIS, WITHOUT WARRANTIES # OR CONDITIONS OF ANY KIND, express", "Delete an S3 bucket together with all stored objects. 
:param", "a zip archive containing all files and dirs rooted in", "root dir will be uploaded as zip files to $bucket_name/$dir_name/artifacts.zip.", "= method.__self__ paginator = client.get_paginator(method.__name__) for page in paginator.paginate(**kwargs).result_key_iters(): for", "return os.environ.get(\"AWS_DEFAULT_REGION\") def get_partition(): \"\"\"Get partition for the AWS_DEFAULT_REGION set", "elif stack_status in valid_status: outputs = stack_result.get(\"Outputs\") master_ip = get_stack_output_value(outputs,", "} :param region: AWS Region :param feature: the feature to", "name of the S3 bucket to create :param region: aws", "_get_master_server_ip(stack_name) template = cfn.get_template(StackName=stack_name) mappings = template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\") base_os = get_cfn_param(stack_result.get(\"Parameters\"),", "{0} username.\".format(cluster_name)) except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) return master_ip, username", "if not username: error(\"Failed to get cluster {0} username.\".format(cluster_name)) except", "at # # http://aws.amazon.com/apache2.0/ # # or in the \"LICENSE.txt\"", "cluster {0} ip.\".format(cluster_name)) if not username: error(\"Failed to get cluster", "Region :param file_name the object name to get :return: a", "status != \"CREATE_COMPLETE\": LOGGER.critical(\"\\nCluster creation failed. 
Failed events:\") events =", ":param efs_fs_id: EFS file system Id :param avail_zone: Availability zone", "an issue {1}\".format(feature, PCLUSTER_ISSUES_LINK) ) return supported_features def get_instance_vcpus(region, instance_type):", "file_name).get()[\"Body\"].read().decode(\"utf-8\") return json.loads(file_contents) def get_supported_features(region, feature): \"\"\" Get a json", "} } :param region: AWS Region :param feature: the feature", "delete it manually.\" % bucket_name) def zip_dir(path): \"\"\" Create a", "ClientError): code = e.response.get(\"Error\").get(\"Code\") if code == \"InvalidAccessKeyId\": error(e.response.get(\"Error\").get(\"Message\")) error(", "resources to archive. :return file handler pointing to the compressed", "= boto3.resource(\"s3\").Object(bucket_name, file_name).get()[\"Body\"].read().decode(\"utf-8\") return json.loads(file_contents) def get_supported_features(region, feature): \"\"\" Get", "scheduler: the scheduler for which we want to know the", ") sys.stdout.write(\"\\r%s\" % resource_status) sys.stdout.flush() time.sleep(5) # print the last", "is the latest one.\"\"\" try: latest = json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[ \"info\" ][\"version\"]", "res)): bucket.upload_file(os.path.join(root, res), res) def _get_json_from_s3(region, file_name): \"\"\" Get pricing", "return supported_features def get_instance_vcpus(region, instance_type): \"\"\" Get number of vcpus", "# Check to see if there is an existing mt", "notify if the stack creation fails. 
:param stack_name: the stack", "object :return private/public ip address \"\"\" ec2 = boto3.client(\"ec2\") master_id", "\"\"\" :type : pyboto3.s3 \"\"\" try: if region != \"us-east-1\":", "= template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\") base_os = get_cfn_param(stack_result.get(\"Parameters\"), \"BaseOS\") username = mappings.get(base_os).get(\"User\") if", "current package version is the latest one.\"\"\" try: latest =", "by a feature, for example. { \"Features\": { \"efa\": {", "uploaded :param root: root directory containing the resources to upload.", "the given Stack. :param stack_name: the CFN Stack name :param", "ClientError LOGGER = logging.getLogger(__name__) PCLUSTER_STACK_PREFIX = \"parallelcluster-\" PCLUSTER_ISSUES_LINK = \"https://github.com/aws/aws-parallelcluster/issues\"", "status :return: True if the creation was successful, false otherwise.", "= ( boto3.client(\"ssm\") .get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\") .get(\"Parameters\")[0] .get(\"Value\") ) except ClientError as", "latest) except Exception: pass def warn(message): \"\"\"Print a warning message.\"\"\"", "accompanying this file. This file is distributed on an \"AS", "an existing mt in the az of the stack mount_target_subnet", "to download the file :raises ValueError if unable to decode", "decode the file content \"\"\" bucket_name = \"{0}-aws-parallelcluster\".format(region) file_contents =", "region = get_region() s3_suffix = \".cn\" if region.startswith(\"cn\") else \"\"", "method.__self__ paginator = client.get_paginator(method.__name__) for page in paginator.paginate(**kwargs).result_key_iters(): for result", "located at # # http://aws.amazon.com/apache2.0/ # # or in the", "get_stack_name(cluster_name): return PCLUSTER_STACK_PREFIX + cluster_name def get_region(): \"\"\"Get AWS_DEFAULT_REGION from", "scheduler supported by parallelcluster. :return: a tuple of strings of", "bucket %s. 
Please delete it manually.\" % bucket_name) def zip_dir(path):", "an error message and Raise SystemExit exception to the stderr", "efs_fs_id: mount_targets = boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id) for mount_target in mount_targets.get(\"MountTargets\"): # Check", "<gh_stars>1-10 # Copyright 2018 Amazon.com, Inc. or its affiliates. All", "\"alinux\", \"centos6\", \"centos7\", \"ubuntu1604\", \"ubuntu1804\" def get_supported_schedulers(): \"\"\" Return a", "# isort:skip standard_library.install_aliases() # fmt: on import json import logging", "manually.\" % bucket_name) def zip_dir(path): \"\"\" Create a zip archive", "= \".cn\" if region.startswith(\"cn\") else \"\" return \"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format( REGION=region, S3_SUFFIX=s3_suffix", "- %s %s %s\", event.get(\"ResourceType\"), event.get(\"LogicalResourceId\"), event.get(\"ResourceStatusReason\"), ) return False", "path. All dirs contained in root dir will be uploaded", "Get a json object containing the attributes supported by a", "DescribeStacks action for the given Stack. :param stack_name: the CFN", "parallelcluster. :return: a tuple of strings of the supported scheduler", "of the stack mount_target_subnet = mount_target.get(\"SubnetId\") if avail_zone == get_avail_zone(mount_target_subnet):", "region: AWS Region :param instance_type: the instance type to search", "s3_client.exceptions.BucketAlreadyOwnedByYou: print(\"Bucket already exists\") def delete_s3_bucket(bucket_name): \"\"\" Delete an S3", "the version of the installed aws-parallelcluster package.\"\"\" return pkg_resources.get_distribution(\"aws-parallelcluster\").version def", ":return file handler pointing to the compressed archive. \"\"\" file_out", "for a boto3 call, this allows pagination over an arbitrary", "as zip files to $bucket_name/$dir_name/artifacts.zip. All files contained in root", "under the Apache License, Version 2.0 (the \"License\"). 
You may", "get_region() s3_suffix = \".cn\" if region.startswith(\"cn\") else \"\" return \"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format(", "a tuple of the scheduler supported by parallelcluster. :return: a", "avail_zone): \"\"\" Search for a Mount Target Id in given", "file :raises ValueError if unable to decode the file content", ":param file_name the object name to get :return: a json", "\"\"\" param_value = next((i.get(\"ParameterValue\") for i in params if i.get(\"ParameterKey\")", "stack mount_target_subnet = mount_target.get(\"SubnetId\") if avail_zone == get_avail_zone(mount_target_subnet): mount_target_id =", "stack_name = get_stack_name(cluster_name) stack_result = cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] stack_status = stack_result.get(\"StackStatus\") valid_status", "get_installed_version(): \"\"\"Get the version of the installed aws-parallelcluster package.\"\"\" return", "time.sleep(5) # print the last status update in the logs", "bug on our end. 
\" \"Please submit an issue {1}\".format(feature,", "def check_if_latest_version(): \"\"\"Check if the current package version is the", "cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] except (ClientError, IndexError) as e: error(e.response.get(\"Error\").get(\"Message\")) def verify_stack_creation(stack_name, cfn_client):", "BytesIO import boto3 import pkg_resources from botocore.exceptions import ClientError LOGGER", "get ip address.\", state.upper()) return ip_address def get_master_ip_and_username(cluster_name): cfn =", "[\"c5n.18xlarge\", \"p3dn.24xlarge\", \"i3en.24xlarge\"], \"baseos\": [\"alinux\", \"centos7\"], \"schedulers\": [\"sge\", \"slurm\", \"torque\"]", "boto3.client(\"ec2\") master_id = get_master_server_id(stack_name) if not master_id: error(\"MasterServer not running.", "stack_name: the CFN Stack name :param cfn_client: boto3 cloudformation client", "we should verify :param cfn_client: the CloudFormation client to use", "boto3.client(\"cloudformation\") try: stack_name = get_stack_name(cluster_name) stack_result = cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] stack_status =", "# isort:skip from future import standard_library # isort:skip standard_library.install_aliases() #", "return mount_target_id def get_avail_zone(subnet_id): avail_zone = None try: avail_zone =", "\"instances\": [\"c5n.18xlarge\", \"p3dn.24xlarge\", \"i3en.24xlarge\"], \"baseos\": [\"alinux\", \"centos7\"], \"schedulers\": [\"sge\", \"slurm\",", "a boto3 call, this allows pagination over an arbitrary number", "parameter exists, otherwise None \"\"\" param_value = next((i.get(\"ParameterValue\") for i", "in the environment.\"\"\" return \"aws-us-gov\" if get_region().startswith(\"us-gov\") else \"aws\" def", "\"w\", zipfile.ZIP_DEFLATED) as ziph: for root, _, files in os.walk(path):", "avail_zone: Availability zone to verify :return: the mount_target_id or None", "string containing the path of bucket.\"\"\" region = get_region() s3_suffix", 
"of the supported scheduler \"\"\" return \"sge\", \"torque\", \"slurm\", \"awsbatch\"", "of the installed aws-parallelcluster package.\"\"\" return pkg_resources.get_distribution(\"aws-parallelcluster\").version def check_if_latest_version(): \"\"\"Check", "the file content :raises ClientError if unable to download the", "Amazon.com, Inc. or its affiliates. All Rights Reserved. # #", "creation to be completed and notify if the stack creation", "the compressed archive. \"\"\" file_out = BytesIO() with zipfile.ZipFile(file_out, \"w\",", "import urllib.request import zipfile from io import BytesIO import boto3", "pagination over an arbitrary number of responses. :param method: boto3", "for page in paginator.paginate(**kwargs).result_key_iters(): for result in page: yield result", "resource_status = (\"Status: %s - %s\" % (events.get(\"LogicalResourceId\"), events.get(\"ResourceStatus\"))).ljust( 80", "running master_ip = _get_master_server_ip(stack_name) template = cfn.get_template(StackName=stack_name) mappings = template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\")", "int(instances[instance_type][\"vcpus\"]) except (KeyError, ValueError, ClientError): vcpus = -1 return vcpus", "specified S3 bucket the content of the directory rooted in", "name :param cfn_client: boto3 cloudformation client :return: the Stack data", "SSH\") instance = ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0] ip_address = instance.get(\"PublicIpAddress\") if ip_address is", "Inc. or its affiliates. All Rights Reserved. # # Licensed", "get_avail_zone(subnet_id): avail_zone = None try: avail_zone = ( boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\") )", "instance types available on EC2, independent by the region.\"\"\" return", "output value from Cloudformation Stack Output. 
:param stack_outputs: Cloudformation Stack", "Return a tuple of the os supported by parallelcluster for", "partition for the AWS_DEFAULT_REGION set in the environment.\"\"\" return \"aws-us-gov\"", "the stack creation fails. :param stack_name: the stack name that", "list of all the instance types available on EC2, independent", "AWS Region :param instance_type: the instance type to search for.", "to the specified S3 bucket the content of the directory", "use this file except in compliance with # the License.", "get_partition(): \"\"\"Get partition for the AWS_DEFAULT_REGION set in the environment.\"\"\"", "bucket_name) def zip_dir(path): \"\"\" Create a zip archive containing all", "[\"r3.8xlarge\", ..., \"m5.4xlarge\"] } } } :param region: AWS Region", "page in paginator.paginate(**kwargs).result_key_iters(): for result in page: yield result def", "download the file :raises ValueError if unable to decode the", "def get_stack(stack_name, cfn_client=None): \"\"\" Get the output for a DescribeStacks", "result def create_s3_bucket(bucket_name, region): \"\"\" Create a new S3 bucket.", "ClientError: print(\"Failed to delete bucket %s. 
Please delete it manually.\"", "object name to get :return: a json object representing the", "= mappings.get(base_os).get(\"User\") if not master_ip: error(\"Failed to get cluster {0}", "== \"awsbatch\" else \"alinux\", \"centos6\", \"centos7\", \"ubuntu1604\", \"ubuntu1804\" def get_supported_schedulers():", "the CloudFormation client to use to verify stack status :return:", "file system Id :param avail_zone: Availability zone to verify :return:", "events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\") for event in events: if event.get(\"ResourceStatus\") ==", "the file content \"\"\" bucket_name = \"{0}-aws-parallelcluster\".format(region) file_contents = boto3.resource(\"s3\").Object(bucket_name,", "of strings of the supported os \"\"\" return \"alinux\" if", "aws-parallelcluster package.\"\"\" return pkg_resources.get_distribution(\"aws-parallelcluster\").version def check_if_latest_version(): \"\"\"Check if the current", "\"\"\" file_out = BytesIO() with zipfile.ZipFile(file_out, \"w\", zipfile.ZIP_DEFLATED) as ziph:", ":param bucket_name: name of the S3 bucket to create :param", "cannot be found or the pricing file cannot be retrieved/parsed", "number of vcpus for the given instance type. :param region:", "available on EC2, independent by the region.\"\"\" return boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum def", "def verify_stack_creation(stack_name, cfn_client): \"\"\" Wait for the stack creation to", "off from __future__ import absolute_import, print_function # isort:skip from future", "the CFN Stack name :param cfn_client: boto3 cloudformation client :return:", "LOGGER = logging.getLogger(__name__) PCLUSTER_STACK_PREFIX = \"parallelcluster-\" PCLUSTER_ISSUES_LINK = \"https://github.com/aws/aws-parallelcluster/issues\" def", "Version 2.0 (the \"License\"). You may not use this file", "a feature, for example. 
{ \"Features\": { \"efa\": { \"instances\":", "mount_target.get(\"MountTargetId\") return mount_target_id def get_avail_zone(subnet_id): avail_zone = None try: avail_zone", "def get_efs_mount_target_id(efs_fs_id, avail_zone): \"\"\" Search for a Mount Target Id", "failed. Failed events:\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\") for event in events:", "rooted in root path. All dirs contained in root dir", "json object containing the attributes supported by a feature, for", "the region.\"\"\" return boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum def get_master_server_id(stack_name): \"\"\"Return the physical id", "Create a zip archive containing all files and dirs rooted", "\"\"\" s3_client = boto3.client(\"s3\") \"\"\" :type : pyboto3.s3 \"\"\" try:", "stack status :return: True if the creation was successful, false", "\"LICENSE.txt\" file accompanying this file. This file is distributed on", "{ \"Features\": { \"efa\": { \"instances\": [\"c5n.18xlarge\", \"p3dn.24xlarge\", \"i3en.24xlarge\"], \"baseos\":", "The name of the cloudformation stack :param config: Config object", "availability zone for the given EFS file system id. 
:param", "if stack_status in invalid_status: error(\"Unable to retrieve master_ip and username", "and username for a stack in the status: {0}\".format(stack_status)) elif", "that parameter exists, otherwise None \"\"\" param_value = next((i.get(\"ParameterValue\") for", "in mount_targets.get(\"MountTargets\"): # Check to see if there is an", "= cfn.get_template(StackName=stack_name) mappings = template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\") base_os = get_cfn_param(stack_result.get(\"Parameters\"), \"BaseOS\") username", "def get_region(): \"\"\"Get AWS_DEFAULT_REGION from the environment.\"\"\" return os.environ.get(\"AWS_DEFAULT_REGION\") def", "code = e.response.get(\"Error\").get(\"Code\") if code == \"InvalidAccessKeyId\": error(e.response.get(\"Error\").get(\"Message\")) error( \"Failed", "yield result def create_s3_bucket(bucket_name, region): \"\"\" Create a new S3", "LOGGER.debug( \"Unable to detect availability zone for subnet {0}.\\n{1}\".format( subnet_id,", "status = get_stack(stack_name, cfn_client).get(\"StackStatus\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0] resource_status = (\"Status:", "for, i.e. 
\"efa\" \"awsbatch\" :return: json object containing all the", "ClientError as e: LOGGER.debug( \"Unable to detect availability zone for", "[\"DELETE_COMPLETE\", \"DELETE_IN_PROGRESS\"] if stack_status in invalid_status: error(\"Unable to retrieve master_ip", "(ValueError, ClientError, KeyError) as e: if isinstance(e, ClientError): code =", "the latest one.\"\"\" try: latest = json.loads(urllib.request.urlopen(\"https://pypi.python.org/pypi/aws-parallelcluster/json\").read())[ \"info\" ][\"version\"] if", "ip address \"\"\" ec2 = boto3.client(\"ec2\") master_id = get_master_server_id(stack_name) if", "None if efs_fs_id: mount_targets = boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id) for mount_target in mount_targets.get(\"MountTargets\"):", "handler pointing to the compressed archive. \"\"\" file_out = BytesIO()", "None \"\"\" param_value = next((i.get(\"ParameterValue\") for i in params if", "def _get_master_server_ip(stack_name): \"\"\" Get the IP Address of the MasterServer.", "= int(instances[instance_type][\"vcpus\"]) except (KeyError, ValueError, ClientError): vcpus = -1 return", "stack creation to be completed and notify if the stack", "in the az of the stack mount_target_subnet = mount_target.get(\"SubnetId\") if", "installed aws-parallelcluster package.\"\"\" return pkg_resources.get_distribution(\"aws-parallelcluster\").version def check_if_latest_version(): \"\"\"Check if the", "dir will be uploaded to $bucket_name. :param bucket_name: name of", "that we should verify :param cfn_client: the CloudFormation client to", "is located at # # http://aws.amazon.com/apache2.0/ # # or in", "created in memory and a file handler is returned by", "generator for a boto3 call, this allows pagination over an", "pricing file (if none) and parse content as json. 
:param", "that output exists, otherwise None \"\"\" return next((o.get(\"OutputValue\") for o", "file_out def upload_resources_artifacts(bucket_name, root): \"\"\" Upload to the specified S3", "the AWS_DEFAULT_REGION set in the environment.\"\"\" return \"aws-us-gov\" if get_region().startswith(\"us-gov\")", ":return: a tuple of strings of the supported scheduler \"\"\"", "return json.loads(file_contents) def get_supported_features(region, feature): \"\"\" Get a json object", "the MasterServer. :param stack_name: The name of the cloudformation stack", "resources to upload. \"\"\" bucket = boto3.resource(\"s3\").Bucket(bucket_name) for res in", "if state != \"running\" or ip_address is None: error(\"MasterServer: %s\\nCannot", "with all stored objects. :param bucket_name: name of the S3", "of the os supported by parallelcluster for the specific scheduler.", "= cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\") for event in events: if event.get(\"ResourceStatus\") == \"CREATE_FAILED\":", "= stack_result.get(\"Outputs\") master_ip = get_stack_output_value(outputs, \"MasterPublicIP\") or _get_master_server_ip(stack_name) username =", "is a newer version %s of AWS ParallelCluster available.\" %", "== \"CREATE_IN_PROGRESS\": status = get_stack(stack_name, cfn_client).get(\"StackStatus\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0] resource_status", "by parallelcluster. 
:return: a tuple of strings of the supported", "with zipfile.ZipFile(file_out, \"w\", zipfile.ZIP_DEFLATED) as ziph: for root, _, files", "print the last status update in the logs if resource_status", ":return: generator with boto3 results \"\"\" client = method.__self__ paginator", "\"info\" ][\"version\"] if get_installed_version() < latest: print(\"Info: There is a", "get_latest_alinux_ami_id(): \"\"\"Get latest alinux ami id.\"\"\" try: alinux_ami_id = (", "instance type cannot be found or the pricing file cannot", "e: error(e.response.get(\"Error\").get(\"Message\")) def _get_master_server_ip(stack_name): \"\"\" Get the IP Address of", "}, \"batch\": { \"instances\": [\"r3.8xlarge\", ..., \"m5.4xlarge\"] } } }", "a generator for a boto3 call, this allows pagination over", "get_stack_name(cluster_name) stack_result = cfn.describe_stacks(StackName=stack_name).get(\"Stacks\")[0] stack_status = stack_result.get(\"StackStatus\") valid_status = [\"CREATE_COMPLETE\",", "def create_s3_bucket(bucket_name, region): \"\"\" Create a new S3 bucket. :param", "scheduler \"\"\" return \"sge\", \"torque\", \"slurm\", \"awsbatch\" def get_stack_output_value(stack_outputs, output_key):", "res), res) def _get_json_from_s3(region, file_name): \"\"\" Get pricing file (if", "the License is located at # # http://aws.amazon.com/apache2.0/ # #", "else: s3_client.create_bucket(Bucket=bucket_name) except s3_client.exceptions.BucketAlreadyOwnedByYou: print(\"Bucket already exists\") def delete_s3_bucket(bucket_name): \"\"\"", "client.get_paginator(method.__name__) for page in paginator.paginate(**kwargs).result_key_iters(): for result in page: yield", "an arbitrary number of responses. 
:param method: boto3 method :param", "def get_instance_vcpus(region, instance_type): \"\"\" Get number of vcpus for the", "Exception: pass def warn(message): \"\"\"Print a warning message.\"\"\" print(\"WARNING: {0}\".format(message))", "as e: error(e.response.get(\"Error\").get(\"Message\")) def verify_stack_creation(stack_name, cfn_client): \"\"\" Wait for the", "print(\"ERROR: {0}\".format(message)) def get_cfn_param(params, key_name): \"\"\" Get parameter value from", "import sys import time import urllib.request import zipfile from io", "environment.\"\"\" return os.environ.get(\"AWS_DEFAULT_REGION\") def get_partition(): \"\"\"Get partition for the AWS_DEFAULT_REGION", "newer version %s of AWS ParallelCluster available.\" % latest) except", "to retrieve Amazon Linux AMI id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\"))) return alinux_ami_id def list_ec2_instance_types():", "or ip_address is None: error(\"MasterServer: %s\\nCannot get ip address.\", state.upper())", "directory containing the resources to upload. \"\"\" bucket = boto3.resource(\"s3\").Bucket(bucket_name)", "of the S3 bucket where files are uploaded :param root:", ") return False return True def get_templates_bucket_path(): \"\"\"Return a string", "to delete \"\"\" try: bucket = boto3.resource(\"s3\").Bucket(bucket_name) bucket.objects.all().delete() bucket.delete() except", "root path. All dirs contained in root dir will be", "i.get(\"ParameterKey\") == key_name), \"NONE\") return param_value.strip() def get_efs_mount_target_id(efs_fs_id, avail_zone): \"\"\"", "Return a generator for a boto3 call, this allows pagination", "or the pricing file cannot be retrieved/parsed \"\"\" try: instances", "the mount_target_id or None \"\"\" mount_target_id = None if efs_fs_id:", "License. 
A copy of the License is located at #", "paginator.paginate(**kwargs).result_key_iters(): for result in page: yield result def create_s3_bucket(bucket_name, region):", ") except ClientError as e: LOGGER.debug( \"Unable to detect availability", "-1 return vcpus def get_supported_os(scheduler): \"\"\" Return a tuple of", "az of the stack mount_target_subnet = mount_target.get(\"SubnetId\") if avail_zone ==", ") except ClientError as e: error(\"Unable to retrieve Amazon Linux", "%s of AWS ParallelCluster available.\" % latest) except Exception: pass", "all the attributes supported by feature \"\"\" try: features =", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES # OR", "to create :param region: aws region \"\"\" s3_client = boto3.client(\"s3\")", "of the directory rooted in root path. All dirs contained", "stack_name: the stack name that we should verify :param cfn_client:", "and # limitations under the License. # fmt: off from", "event in events: if event.get(\"ResourceStatus\") == \"CREATE_FAILED\": LOGGER.info( \" -", "mount_target_id def get_avail_zone(subnet_id): avail_zone = None try: avail_zone = (", "if get_installed_version() < latest: print(\"Info: There is a newer version", "KeyError) as e: if isinstance(e, ClientError): code = e.response.get(\"Error\").get(\"Code\") if", "= ( boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\") ) except ClientError as e: LOGGER.debug( \"Unable", "the number of vcpus or -1 if the instance type", "scheduler for which we want to know the supported os", "bucket the content of the directory rooted in root path.", "\"p3dn.24xlarge\", \"i3en.24xlarge\"], \"baseos\": [\"alinux\", \"centos7\"], \"schedulers\": [\"sge\", \"slurm\", \"torque\"] },", "cloudformation client :return: the Stack data type \"\"\" try: if", "content \"\"\" bucket_name = \"{0}-aws-parallelcluster\".format(region) file_contents = boto3.resource(\"s3\").Object(bucket_name, 
file_name).get()[\"Body\"].read().decode(\"utf-8\") return", "completed and notify if the stack creation fails. :param stack_name:", "None) def get_stack(stack_name, cfn_client=None): \"\"\" Get the output for a", "standard_library # isort:skip standard_library.install_aliases() # fmt: on import json import", "get_efs_mount_target_id(efs_fs_id, avail_zone): \"\"\" Search for a Mount Target Id in", "% res) elif os.path.isfile(os.path.join(root, res)): bucket.upload_file(os.path.join(root, res), res) def _get_json_from_s3(region,", "PCLUSTER_ISSUES_LINK) ) return supported_features def get_instance_vcpus(region, instance_type): \"\"\" Get number", "instance_type): \"\"\" Get number of vcpus for the given instance", "system Id :param avail_zone: Availability zone to verify :return: the", "tuple of strings of the supported scheduler \"\"\" return \"sge\",", "None \"\"\" mount_target_id = None if efs_fs_id: mount_targets = boto3.client(\"efs\").describe_mount_targets(FileSystemId=efs_fs_id)", "event.get(\"LogicalResourceId\"), event.get(\"ResourceStatusReason\"), ) return False return True def get_templates_bucket_path(): \"\"\"Return", "You may not use this file except in compliance with", ":return: a tuple of strings of the supported os \"\"\"", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES # OR CONDITIONS", "\"awsbatch\" :return: json object containing all the attributes supported by", "resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\") except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) def _get_master_server_ip(stack_name): \"\"\" Get", "# print the last status update in the logs if", ":param region: AWS Region :param file_name the object name to", "\"CREATE_IN_PROGRESS\": status = get_stack(stack_name, cfn_client).get(\"StackStatus\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")[0] resource_status =", "in paginator.paginate(**kwargs).result_key_iters(): for result in page: yield result def 
create_s3_bucket(bucket_name,", "\"{0}-aws-parallelcluster\".format(region) file_contents = boto3.resource(\"s3\").Object(bucket_name, file_name).get()[\"Body\"].read().decode(\"utf-8\") return json.loads(file_contents) def get_supported_features(region, feature):", "for event in events: if event.get(\"ResourceStatus\") == \"CREATE_FAILED\": LOGGER.info( \"", "feature, for example. { \"Features\": { \"efa\": { \"instances\": [\"c5n.18xlarge\",", "\"AS IS\" BASIS, WITHOUT WARRANTIES # OR CONDITIONS OF ANY", "e: LOGGER.debug( \"Unable to detect availability zone for subnet {0}.\\n{1}\".format(", "\"\"\"Print an error message and Raise SystemExit exception to the", "if unable to decode the file content \"\"\" bucket_name =", "from Cloudformation Stack Parameters. :param params: Cloudformation Stack Parameters :param", "is true.\"\"\" if fail_on_error: sys.exit(\"ERROR: {0}\".format(message)) else: print(\"ERROR: {0}\".format(message)) def", "get_stack_output_value(outputs, \"ClusterUser\") else: # Stack is in CREATING, CREATED_FAILED, or", "MasterServer is running master_ip = _get_master_server_ip(stack_name) template = cfn.get_template(StackName=stack_name) mappings", "\"\"\" Get the output for a DescribeStacks action for the", "of vcpus or -1 if the instance type cannot be", "supported_features = features.get(\"Features\").get(feature) except (ValueError, ClientError, KeyError) as e: if", "The archive is created in memory and a file handler", "mount_target_id or None \"\"\" mount_target_id = None if efs_fs_id: mount_targets", "file except in compliance with # the License. A copy", "o.get(\"OutputKey\") == output_key), None) def get_stack(stack_name, cfn_client=None): \"\"\" Get the", "bucket. 
:param bucket_name: name of the S3 bucket to create", "e.response.get(\"Error\").get(\"Message\") ) ) return avail_zone def get_latest_alinux_ami_id(): \"\"\"Get latest alinux", "\"\"\" Get number of vcpus for the given instance type.", "res) elif os.path.isfile(os.path.join(root, res)): bucket.upload_file(os.path.join(root, res), res) def _get_json_from_s3(region, file_name):", "resource_status = \"\" while status == \"CREATE_IN_PROGRESS\": status = get_stack(stack_name,", "region.\"\"\" return boto3.client(\"ec2\").meta.service_model.shape_for(\"InstanceType\").enum def get_master_server_id(stack_name): \"\"\"Return the physical id of", "def get_master_ip_and_username(cluster_name): cfn = boto3.client(\"cloudformation\") try: stack_name = get_stack_name(cluster_name) stack_result", "path. The archive is created in memory and a file", "in invalid_status: error(\"Unable to retrieve master_ip and username for a", "for the AWS_DEFAULT_REGION set in the environment.\"\"\" return \"aws-us-gov\" if", "retrieve Amazon Linux AMI id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\"))) return alinux_ami_id def list_ec2_instance_types(): \"\"\"Return", "( boto3.client(\"ssm\") .get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\") .get(\"Parameters\")[0] .get(\"Value\") ) except ClientError as e:", "file. This file is distributed on an \"AS IS\" BASIS,", "if i.get(\"ParameterKey\") == key_name), \"NONE\") return param_value.strip() def get_efs_mount_target_id(efs_fs_id, avail_zone):", "set in the environment.\"\"\" return \"aws-us-gov\" if get_region().startswith(\"us-gov\") else \"aws\"", "copy of the License is located at # # http://aws.amazon.com/apache2.0/", "res) def _get_json_from_s3(region, file_name): \"\"\" Get pricing file (if none)", "the Apache License, Version 2.0 (the \"License\"). 
You may not", "region: AWS Region :param feature: the feature to search for,", "cloudformation stack :param config: Config object :return private/public ip address", "results \"\"\" client = method.__self__ paginator = client.get_paginator(method.__name__) for page", "i.e. \"efa\" \"awsbatch\" :return: json object containing all the attributes", "root directory containing the resources to upload. \"\"\" bucket =", "param_value = next((i.get(\"ParameterValue\") for i in params if i.get(\"ParameterKey\") ==", ":param root: root directory containing the resources to upload. \"\"\"", "def get_stack_name(cluster_name): return PCLUSTER_STACK_PREFIX + cluster_name def get_region(): \"\"\"Get AWS_DEFAULT_REGION", "vcpus = -1 return vcpus def get_supported_os(scheduler): \"\"\" Return a", "urllib.request import zipfile from io import BytesIO import boto3 import", ".get(\"Parameters\")[0] .get(\"Value\") ) except ClientError as e: error(\"Unable to retrieve", "or -1 if the instance type cannot be found or", "return vcpus def get_supported_os(scheduler): \"\"\" Return a tuple of the", "mt in the az of the stack mount_target_subnet = mount_target.get(\"SubnetId\")", "# Stack is in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer", "Get parameter value from Cloudformation Stack Parameters. 
:param params: Cloudformation", ":return: the Stack data type \"\"\" try: if not cfn_client:", "pkg_resources from botocore.exceptions import ClientError LOGGER = logging.getLogger(__name__) PCLUSTER_STACK_PREFIX =", "def get_stack_output_value(stack_outputs, output_key): \"\"\" Get output value from Cloudformation Stack", "of the S3 bucket to delete \"\"\" try: bucket =", "from botocore.exceptions import ClientError LOGGER = logging.getLogger(__name__) PCLUSTER_STACK_PREFIX = \"parallelcluster-\"", "available.\" % latest) except Exception: pass def warn(message): \"\"\"Print a", "region != \"us-east-1\": s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={\"LocationConstraint\": region}) else: s3_client.create_bucket(Bucket=bucket_name) except s3_client.exceptions.BucketAlreadyOwnedByYou:", "This file is distributed on an \"AS IS\" BASIS, WITHOUT", "for the given Stack. :param stack_name: the CFN Stack name", "the az of the stack mount_target_subnet = mount_target.get(\"SubnetId\") if avail_zone", "ClientError): vcpus = -1 return vcpus def get_supported_os(scheduler): \"\"\" Return", "\"centos6\", \"centos7\", \"ubuntu1604\", \"ubuntu1804\" def get_supported_schedulers(): \"\"\" Return a tuple", "from io import BytesIO import boto3 import pkg_resources from botocore.exceptions", "given instance type. :param region: AWS Region :param instance_type: the", "if the creation was successful, false otherwise. \"\"\" status =", "ami id.\"\"\" try: alinux_ami_id = ( boto3.client(\"ssm\") .get_parameters_by_path(Path=\"/aws/service/ami-amazon-linux-latest\") .get(\"Parameters\")[0] .get(\"Value\")", ") return resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\") except ClientError as e: error(e.response.get(\"Error\").get(\"Message\")) def _get_master_server_ip(stack_name):", "to the compressed archive. 
\"\"\" file_out = BytesIO() with zipfile.ZipFile(file_out,", "\"\"\"Return a string containing the path of bucket.\"\"\" region =", "the attributes supported by feature \"\"\" try: features = _get_json_from_s3(region,", "except Exception: pass def warn(message): \"\"\"Print a warning message.\"\"\" print(\"WARNING:", "mount_targets.get(\"MountTargets\"): # Check to see if there is an existing", "to get cluster {0} ip.\".format(cluster_name)) if not username: error(\"Failed to", "\"\"\" Search for a Mount Target Id in given availability", "json object containing all the attributes supported by feature \"\"\"", "the current package version is the latest one.\"\"\" try: latest", ":param bucket_name: name of the S3 bucket to delete \"\"\"", "limitations under the License. # fmt: off from __future__ import", "mount_target in mount_targets.get(\"MountTargets\"): # Check to see if there is", "\"\" return \"https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/\".format( REGION=region, S3_SUFFIX=s3_suffix ) def get_installed_version(): \"\"\"Get the", "mount_target_id = mount_target.get(\"MountTargetId\") return mount_target_id def get_avail_zone(subnet_id): avail_zone = None", "\"\"\" Create a zip archive containing all files and dirs", "= ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0] ip_address = instance.get(\"PublicIpAddress\") if ip_address is None: ip_address", "in root path. All dirs contained in root dir will", "\"\"\" Delete an S3 bucket together with all stored objects.", "exists, otherwise None \"\"\" param_value = next((i.get(\"ParameterValue\") for i in", "Parameters. :param params: Cloudformation Stack Parameters :param key_name: Parameter Key", "in stack_outputs if o.get(\"OutputKey\") == output_key), None) def get_stack(stack_name, cfn_client=None):", "License. 
# fmt: off from __future__ import absolute_import, print_function #", "mappings = template.get(\"TemplateBody\").get(\"Mappings\").get(\"OSFeatures\") base_os = get_cfn_param(stack_result.get(\"Parameters\"), \"BaseOS\") username = mappings.get(base_os).get(\"User\")", "avail_zone = ( boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\") ) except ClientError as e: LOGGER.debug(", "$bucket_name/$dir_name/artifacts.zip. All files contained in root dir will be uploaded", "return avail_zone def get_latest_alinux_ami_id(): \"\"\"Get latest alinux ami id.\"\"\" try:", "master server, or [] if no master server.\"\"\" try: resources", "is created in memory and a file handler is returned", "\"Features\": { \"efa\": { \"instances\": [\"c5n.18xlarge\", \"p3dn.24xlarge\", \"i3en.24xlarge\"], \"baseos\": [\"alinux\",", "region \"\"\" s3_client = boto3.client(\"s3\") \"\"\" :type : pyboto3.s3 \"\"\"", "try: if region != \"us-east-1\": s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={\"LocationConstraint\": region}) else: s3_client.create_bucket(Bucket=bucket_name)", "handler is returned by the function. :param path: directory containing", "a tuple of the os supported by parallelcluster for the", "file), start=path)) file_out.seek(0) return file_out def upload_resources_artifacts(bucket_name, root): \"\"\" Upload", "\"\"\" try: if not cfn_client: cfn_client = boto3.client(\"cloudformation\") return cfn_client.describe_stacks(StackName=stack_name).get(\"Stacks\")[0]", "OF ANY KIND, express or implied. See the License for", "running. 
Can't SSH\") instance = ec2.describe_instances(InstanceIds=[master_id]).get(\"Reservations\")[0].get(\"Instances\")[0] ip_address = instance.get(\"PublicIpAddress\") if", "( boto3.client(\"ec2\").describe_subnets(SubnetIds=[subnet_id]).get(\"Subnets\")[0].get(\"AvailabilityZone\") ) except ClientError as e: LOGGER.debug( \"Unable to", "resources = boto3.client(\"cloudformation\").describe_stack_resource( StackName=stack_name, LogicalResourceId=\"MasterServer\" ) return resources.get(\"StackResourceDetail\").get(\"PhysicalResourceId\") except ClientError", "CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer is running master_ip =", "the supported scheduler \"\"\" return \"sge\", \"torque\", \"slurm\", \"awsbatch\" def", "name of the cloudformation stack :param config: Config object :return", "bucket = boto3.resource(\"s3\").Bucket(bucket_name) bucket.objects.all().delete() bucket.delete() except boto3.client(\"s3\").exceptions.NoSuchBucket: pass except ClientError:", "zone for the given EFS file system id. :param efs_fs_id:", "type. :param region: AWS Region :param instance_type: the instance type", "name to get :return: a json object representing the file", "and a file handler is returned by the function. :param", "name that we should verify :param cfn_client: the CloudFormation client", "region: aws region \"\"\" s3_client = boto3.client(\"s3\") \"\"\" :type :", "affiliates. All Rights Reserved. # # Licensed under the Apache", "ip_address = instance.get(\"PublicIpAddress\") if ip_address is None: ip_address = instance.get(\"PrivateIpAddress\")", "ClientError if unable to download the file :raises ValueError if", "files and dirs rooted in path. The archive is created", "all files and dirs rooted in path. The archive is", "resource_status != \"\": LOGGER.debug(resource_status) if status != \"CREATE_COMPLETE\": LOGGER.critical(\"\\nCluster creation", "!= \"CREATE_COMPLETE\": LOGGER.critical(\"\\nCluster creation failed. 
Failed events:\") events = cfn_client.describe_stack_events(StackName=stack_name).get(\"StackEvents\")", "list_ec2_instance_types(): \"\"\"Return a list of all the instance types available", "Amazon Linux AMI id.\\n{0}\".format(e.response.get(\"Error\").get(\"Message\"))) return alinux_ami_id def list_ec2_instance_types(): \"\"\"Return a", "Region :param instance_type: the instance type to search for. :return:", "fmt: on import json import logging import os import sys", "bucket.delete() except boto3.client(\"s3\").exceptions.NoSuchBucket: pass except ClientError: print(\"Failed to delete bucket" ]
[ "EventBus from myevent import GreetEvent from myevent import ByeEvent from", "__author__ = 'Xsank' import time from thinkutils_plus.eventbus.eventbus import EventBus from", "__name__==\"__main__\": eventbus=EventBus() eventbus.register(MyListener()) ge=GreetEvent('world') be=ByeEvent('world') eventbus.async_post(be) eventbus.post(ge) time.sleep(0.1) eventbus.unregister(MyListener()) eventbus.destroy()", "'Xsank' import time from thinkutils_plus.eventbus.eventbus import EventBus from myevent import", "time from thinkutils_plus.eventbus.eventbus import EventBus from myevent import GreetEvent from", "= 'Xsank' import time from thinkutils_plus.eventbus.eventbus import EventBus from myevent", "if __name__==\"__main__\": eventbus=EventBus() eventbus.register(MyListener()) ge=GreetEvent('world') be=ByeEvent('world') eventbus.async_post(be) eventbus.post(ge) time.sleep(0.1) eventbus.unregister(MyListener())", "myevent import ByeEvent from mylistener import MyListener if __name__==\"__main__\": eventbus=EventBus()", "from thinkutils_plus.eventbus.eventbus import EventBus from myevent import GreetEvent from myevent", "from myevent import ByeEvent from mylistener import MyListener if __name__==\"__main__\":", "import MyListener if __name__==\"__main__\": eventbus=EventBus() eventbus.register(MyListener()) ge=GreetEvent('world') be=ByeEvent('world') eventbus.async_post(be) eventbus.post(ge)", "mylistener import MyListener if __name__==\"__main__\": eventbus=EventBus() eventbus.register(MyListener()) ge=GreetEvent('world') be=ByeEvent('world') eventbus.async_post(be)", "from myevent import GreetEvent from myevent import ByeEvent from mylistener", "import time from thinkutils_plus.eventbus.eventbus import EventBus from myevent import GreetEvent", "myevent import GreetEvent from myevent import ByeEvent from mylistener import", "import EventBus from myevent import GreetEvent from myevent import ByeEvent", "from mylistener import MyListener if __name__==\"__main__\": 
eventbus=EventBus() eventbus.register(MyListener()) ge=GreetEvent('world') be=ByeEvent('world')", "import ByeEvent from mylistener import MyListener if __name__==\"__main__\": eventbus=EventBus() eventbus.register(MyListener())", "MyListener if __name__==\"__main__\": eventbus=EventBus() eventbus.register(MyListener()) ge=GreetEvent('world') be=ByeEvent('world') eventbus.async_post(be) eventbus.post(ge) time.sleep(0.1)", "import GreetEvent from myevent import ByeEvent from mylistener import MyListener", "GreetEvent from myevent import ByeEvent from mylistener import MyListener if", "ByeEvent from mylistener import MyListener if __name__==\"__main__\": eventbus=EventBus() eventbus.register(MyListener()) ge=GreetEvent('world')", "thinkutils_plus.eventbus.eventbus import EventBus from myevent import GreetEvent from myevent import" ]
[ "--device=015d14fec128220d'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() finder_options.android_device =", "def tearDown(self): self._android_device_stub.Restore() self._apb_stub.Restore() def testNoAdbReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() def", "import benchmark from telemetry.core import browser_options from telemetry.core.platform import android_device", "license that can be # found in the LICENSE file.", "self._android_device_stub = system_stub.Override( android_device, ['adb_commands']) def testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices = [", "raise OSError('not found') self._android_device_stub.subprocess.Popen = NoAdb self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def", "testAdbOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = ( ['015d14fec128220c']) device =", "tearDown(self): self._android_device_stub.Restore() class GetDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device,", "self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbNoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self.assertEquals([], self._android_device_stub.logging.warnings)", "self._android_device_stub.adb_commands.attached_devices = [ '01', '02'] self.assertEquals( set(['01', '02']), set(device.device_id for", "def testNoAdbReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() def NoAdb(*_, **__): raise OSError('not", "= '555d14fecddddddd' # pick one 
self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '555d14fecddddddd']", "be # found in the LICENSE file. import unittest from", "def testAdbPermissionsErrorReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result = ( 'List of", "finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '015d14fec128220d'] device =", "--device=015d14fec128220c\\n' ' --device=015d14fec128220d'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions()", "self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbTwoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = [", "import unittest from telemetry import benchmark from telemetry.core import browser_options", "permissions\\n', '* daemon not running. 
starting it now on port", "adb kill-server', ' sudo `which adb` devices\\n\\n'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def", "def testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() finder_options.android_device = '555d14fecddddddd' # pick", "'os', 'subprocess', 'logging']) self._apb_stub = system_stub.Override( android_platform_backend, ['adb_commands']) def tearDown(self):", "'logging']) self._apb_stub = system_stub.Override( android_platform_backend, ['adb_commands']) def tearDown(self): self._android_device_stub.Restore() self._apb_stub.Restore()", "finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result = ( 'List of devices attached\\n????????????\\tno", "device = android_device.GetDevice(finder_options) self.assertEquals([ 'Multiple devices attached. Please specify one", "self._android_device_stub = system_stub.Override( android_device, ['adb_commands', 'os', 'subprocess', 'logging']) self._apb_stub =", "attached\\n????????????\\tno permissions\\n', '* daemon not running. starting it now on", "android_device.GetDevice(finder_options) self.assertEquals([ 'adb devices gave a permissions error. 
Consider running", "' adb kill-server', ' sudo `which adb` devices\\n\\n'], self._android_device_stub.logging.warnings) self.assertIsNone(device)", "BSD-style license that can be # found in the LICENSE", "self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbNoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options))", "by a BSD-style license that can be # found in", "# Use of this source code is governed by a", "is governed by a BSD-style license that can be #", "now on port 5037 *\\n' '* daemon started successfully *\\n')", "self._android_device_stub.adb_commands.attached_devices = ( ['015d14fec128220c']) device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('015d14fec128220c',", "def testAdbTwoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '015d14fec128220d']", "self._android_device_stub.Restore() self._apb_stub.Restore() def testNoAdbReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() def NoAdb(*_, **__):", "self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbPermissionsErrorReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result =", "android_device, ['adb_commands']) def testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices = [ '01', '02'] self.assertEquals(", "browser_options.BrowserFinderOptions() def NoAdb(*_, **__): raise OSError('not found') self._android_device_stub.subprocess.Popen = NoAdb", 
"android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd', device.device_id) def testAdbOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions()", "self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbPermissionsErrorReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result = (", "android_device.AndroidDevice.GetAllConnectedDevices() )) def tearDown(self): self._android_device_stub.Restore() class GetDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub", "of this source code is governed by a BSD-style license", "device.device_id) def testAdbOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = ( ['015d14fec128220c'])", "port 5037 *\\n' '* daemon started successfully *\\n') device =", "'* daemon started successfully *\\n') device = android_device.GetDevice(finder_options) self.assertEquals([ 'adb", "of devices attached\\n????????????\\tno permissions\\n', '* daemon not running. starting it", "Please specify one of the following:\\n' ' --device=015d14fec128220c\\n' ' --device=015d14fec128220d'],", "def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands']) def testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices", "root:', ' adb kill-server', ' sudo `which adb` devices\\n\\n'], self._android_device_stub.logging.warnings)", "The Chromium Authors. All rights reserved. 
# Use of this", "self._android_device_stub.subprocess.Popen = NoAdb self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbNoDevicesReturnsNone(self): finder_options =", "*\\n') device = android_device.GetDevice(finder_options) self.assertEquals([ 'adb devices gave a permissions", "browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = ( ['015d14fec128220c']) device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings)", "starting it now on port 5037 *\\n' '* daemon started", "devices attached. Please specify one of the following:\\n' ' --device=015d14fec128220c\\n'", "reserved. # Use of this source code is governed by", "android_device, ['adb_commands', 'os', 'subprocess', 'logging']) self._apb_stub = system_stub.Override( android_platform_backend, ['adb_commands'])", "self._android_device_stub.subprocess.Popen.communicate_result = ( 'List of devices attached\\n????????????\\tno permissions\\n', '* daemon", "self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '015d14fec128220d'] device = android_device.GetDevice(finder_options) self.assertEquals([ 'Multiple", "android_device.GetDevice(finder_options) self.assertEquals([ 'Multiple devices attached. Please specify one of the", "devices gave a permissions error. Consider running adb as root:',", "started successfully *\\n') device = android_device.GetDevice(finder_options) self.assertEquals([ 'adb devices gave", "['adb_commands']) def tearDown(self): self._android_device_stub.Restore() self._apb_stub.Restore() def testNoAdbReturnsNone(self): finder_options = browser_options.BrowserFinderOptions()", "# Copyright 2014 The Chromium Authors. All rights reserved. 
#", "`which adb` devices\\n\\n'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbTwoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions()", "5037 *\\n' '* daemon started successfully *\\n') device = android_device.GetDevice(finder_options)", "*\\n' '* daemon started successfully *\\n') device = android_device.GetDevice(finder_options) self.assertEquals([", "android_platform_backend from telemetry.unittest_util import system_stub class AndroidDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub", "def NoAdb(*_, **__): raise OSError('not found') self._android_device_stub.subprocess.Popen = NoAdb self.assertEquals([],", "devices\\n\\n'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbTwoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices =", "def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands', 'os', 'subprocess', 'logging'])", "= browser_options.BrowserFinderOptions() self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbPermissionsErrorReturnsNone(self): finder_options = browser_options.BrowserFinderOptions()", "'555d14fecddddddd'] device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd', device.device_id) def testAdbOneDeviceReturnsDeviceInstance(self):", "def testAdbNoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbPermissionsErrorReturnsNone(self):", "code is governed by a BSD-style license that can be", "a BSD-style license that can be # found in the", "self._apb_stub = 
system_stub.Override( android_platform_backend, ['adb_commands']) def tearDown(self): self._android_device_stub.Restore() self._apb_stub.Restore() def", "self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbNoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def", "= browser_options.BrowserFinderOptions() def NoAdb(*_, **__): raise OSError('not found') self._android_device_stub.subprocess.Popen =", "self.assertEquals('555d14fecddddddd', device.device_id) def testAdbOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = (", "self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbPermissionsErrorReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result = ( 'List", "setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands']) def testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices =", "finder_options = browser_options.BrowserFinderOptions() self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbPermissionsErrorReturnsNone(self): finder_options =", "that can be # found in the LICENSE file. import", "LICENSE file. import unittest from telemetry import benchmark from telemetry.core", "governed by a BSD-style license that can be # found", "unittest from telemetry import benchmark from telemetry.core import browser_options from", "telemetry.core.platform import android_platform_backend from telemetry.unittest_util import system_stub class AndroidDeviceTest(unittest.TestCase): def", "file. 
import unittest from telemetry import benchmark from telemetry.core import", "successfully *\\n') device = android_device.GetDevice(finder_options) self.assertEquals([ 'adb devices gave a", "permissions error. Consider running adb as root:', ' adb kill-server',", "Use of this source code is governed by a BSD-style", "telemetry.core.platform import android_device from telemetry.core.platform import android_platform_backend from telemetry.unittest_util import", "self.assertEquals([ 'Multiple devices attached. Please specify one of the following:\\n'", "def testAdbOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = ( ['015d14fec128220c']) device", "system_stub.Override( android_device, ['adb_commands', 'os', 'subprocess', 'logging']) self._apb_stub = system_stub.Override( android_platform_backend,", "finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = ( ['015d14fec128220c']) device = android_device.GetDevice(finder_options)", "NoAdb self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbNoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self.assertEquals([],", "one of the following:\\n' ' --device=015d14fec128220c\\n' ' --device=015d14fec128220d'], self._android_device_stub.logging.warnings) self.assertIsNone(device)", "for device in android_device.AndroidDevice.GetAllConnectedDevices() )) def tearDown(self): self._android_device_stub.Restore() class GetDeviceTest(unittest.TestCase):", "'adb devices gave a permissions error. 
Consider running adb as", "android_device from telemetry.core.platform import android_platform_backend from telemetry.unittest_util import system_stub class", "['adb_commands', 'os', 'subprocess', 'logging']) self._apb_stub = system_stub.Override( android_platform_backend, ['adb_commands']) def", "= android_device.GetDevice(finder_options) self.assertEquals([ 'adb devices gave a permissions error. Consider", "All rights reserved. # Use of this source code is", "pick one self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '555d14fecddddddd'] device = android_device.GetDevice(finder_options)", "daemon started successfully *\\n') device = android_device.GetDevice(finder_options) self.assertEquals([ 'adb devices", "self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd', device.device_id) def testAdbOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices =", "' --device=015d14fec128220c\\n' ' --device=015d14fec128220d'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options =", "= system_stub.Override( android_device, ['adb_commands']) def testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices = [ '01',", "= system_stub.Override( android_device, ['adb_commands', 'os', 'subprocess', 'logging']) self._apb_stub = system_stub.Override(", "import browser_options from telemetry.core.platform import android_device from telemetry.core.platform import android_platform_backend", "testNoAdbReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() def NoAdb(*_, **__): raise OSError('not found')", "'02'] self.assertEquals( set(['01', '02']), set(device.device_id for device in android_device.AndroidDevice.GetAllConnectedDevices() ))", "running adb as root:', ' adb kill-server', ' sudo `which", 
"'555d14fecddddddd' # pick one self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '555d14fecddddddd'] device", "from telemetry.core import browser_options from telemetry.core.platform import android_device from telemetry.core.platform", "2014 The Chromium Authors. All rights reserved. # Use of", "daemon not running. starting it now on port 5037 *\\n'", "attached. Please specify one of the following:\\n' ' --device=015d14fec128220c\\n' '", "browser_options from telemetry.core.platform import android_device from telemetry.core.platform import android_platform_backend from", "adb` devices\\n\\n'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbTwoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices", "import android_device from telemetry.core.platform import android_platform_backend from telemetry.unittest_util import system_stub", "**__): raise OSError('not found') self._android_device_stub.subprocess.Popen = NoAdb self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options))", "finder_options.android_device = '555d14fecddddddd' # pick one self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c',", "OSError('not found') self._android_device_stub.subprocess.Popen = NoAdb self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbNoDevicesReturnsNone(self):", "testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() finder_options.android_device = '555d14fecddddddd' # pick one", "testAdbNoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def 
testAdbPermissionsErrorReturnsNone(self): finder_options", "sudo `which adb` devices\\n\\n'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbTwoDevicesReturnsNone(self): finder_options =", "self.assertEquals([ 'adb devices gave a permissions error. Consider running adb", "Copyright 2014 The Chromium Authors. All rights reserved. # Use", "device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd', device.device_id) def testAdbOneDeviceReturnsDeviceInstance(self): finder_options", "self.assertEquals( set(['01', '02']), set(device.device_id for device in android_device.AndroidDevice.GetAllConnectedDevices() )) def", "telemetry.unittest_util import system_stub class AndroidDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override(", "in android_device.AndroidDevice.GetAllConnectedDevices() )) def tearDown(self): self._android_device_stub.Restore() class GetDeviceTest(unittest.TestCase): def setUp(self):", "= browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result = ( 'List of devices attached\\n????????????\\tno permissions\\n',", "= ( 'List of devices attached\\n????????????\\tno permissions\\n', '* daemon not", "in the LICENSE file. 
import unittest from telemetry import benchmark", "import system_stub class AndroidDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device,", "self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '555d14fecddddddd'] device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings)", "benchmark from telemetry.core import browser_options from telemetry.core.platform import android_device from", "testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices = [ '01', '02'] self.assertEquals( set(['01', '02']), set(device.device_id", "self._android_device_stub.Restore() class GetDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands',", "system_stub class AndroidDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands'])", "def tearDown(self): self._android_device_stub.Restore() class GetDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override(", "'subprocess', 'logging']) self._apb_stub = system_stub.Override( android_platform_backend, ['adb_commands']) def tearDown(self): self._android_device_stub.Restore()", "'015d14fec128220c', '015d14fec128220d'] device = android_device.GetDevice(finder_options) self.assertEquals([ 'Multiple devices attached. 
Please", "telemetry import benchmark from telemetry.core import browser_options from telemetry.core.platform import", "from telemetry import benchmark from telemetry.core import browser_options from telemetry.core.platform", "testAdbTwoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '015d14fec128220d'] device", "tearDown(self): self._android_device_stub.Restore() self._apb_stub.Restore() def testNoAdbReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() def NoAdb(*_,", "self.assertIsNone(device) def testAdbTwoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c',", "from telemetry.unittest_util import system_stub class AndroidDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub =", "not running. starting it now on port 5037 *\\n' '*", "as root:', ' adb kill-server', ' sudo `which adb` devices\\n\\n'],", "kill-server', ' sudo `which adb` devices\\n\\n'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbTwoDevicesReturnsNone(self):", "= android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd', device.device_id) def testAdbOneDeviceReturnsDeviceInstance(self): finder_options =", "rights reserved. # Use of this source code is governed", "testAdbPermissionsErrorReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result = ( 'List of devices", "self._apb_stub.Restore() def testNoAdbReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() def NoAdb(*_, **__): raise", "<filename>tools/telemetry/telemetry/core/platform/android_device_unittest.py # Copyright 2014 The Chromium Authors. 
All rights reserved.", "Chromium Authors. All rights reserved. # Use of this source", "from telemetry.core.platform import android_platform_backend from telemetry.unittest_util import system_stub class AndroidDeviceTest(unittest.TestCase):", ")) def tearDown(self): self._android_device_stub.Restore() class GetDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub =", "class GetDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands', 'os',", "browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '015d14fec128220d'] device = android_device.GetDevice(finder_options) self.assertEquals([", "= browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = ( ['015d14fec128220c']) device = android_device.GetDevice(finder_options) self.assertEquals([],", "= system_stub.Override( android_platform_backend, ['adb_commands']) def tearDown(self): self._android_device_stub.Restore() self._apb_stub.Restore() def testNoAdbReturnsNone(self):", "set(device.device_id for device in android_device.AndroidDevice.GetAllConnectedDevices() )) def tearDown(self): self._android_device_stub.Restore() class", "def testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices = [ '01', '02'] self.assertEquals( set(['01', '02']),", "specify one of the following:\\n' ' --device=015d14fec128220c\\n' ' --device=015d14fec128220d'], self._android_device_stub.logging.warnings)", "' --device=015d14fec128220d'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() finder_options.android_device", "browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result = ( 'List of devices attached\\n????????????\\tno 
permissions\\n', '*", "'01', '02'] self.assertEquals( set(['01', '02']), set(device.device_id for device in android_device.AndroidDevice.GetAllConnectedDevices()", "self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() finder_options.android_device = '555d14fecddddddd'", "android_platform_backend, ['adb_commands']) def tearDown(self): self._android_device_stub.Restore() self._apb_stub.Restore() def testNoAdbReturnsNone(self): finder_options =", "self.assertIsNone(device) def testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() finder_options.android_device = '555d14fecddddddd' #", "finder_options = browser_options.BrowserFinderOptions() finder_options.android_device = '555d14fecddddddd' # pick one self._android_device_stub.adb_commands.attached_devices", "# pick one self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '555d14fecddddddd'] device =", "= [ '015d14fec128220c', '555d14fecddddddd'] device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd',", "GetDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands', 'os', 'subprocess',", "'List of devices attached\\n????????????\\tno permissions\\n', '* daemon not running. starting", "'02']), set(device.device_id for device in android_device.AndroidDevice.GetAllConnectedDevices() )) def tearDown(self): self._android_device_stub.Restore()", "[ '015d14fec128220c', '555d14fecddddddd'] device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd', device.device_id)", "found in the LICENSE file. 
import unittest from telemetry import", "' sudo `which adb` devices\\n\\n'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbTwoDevicesReturnsNone(self): finder_options", "'015d14fec128220d'] device = android_device.GetDevice(finder_options) self.assertEquals([ 'Multiple devices attached. Please specify", "this source code is governed by a BSD-style license that", "finder_options = browser_options.BrowserFinderOptions() def NoAdb(*_, **__): raise OSError('not found') self._android_device_stub.subprocess.Popen", "class AndroidDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands']) def", "= [ '015d14fec128220c', '015d14fec128220d'] device = android_device.GetDevice(finder_options) self.assertEquals([ 'Multiple devices", "a permissions error. Consider running adb as root:', ' adb", "system_stub.Override( android_platform_backend, ['adb_commands']) def tearDown(self): self._android_device_stub.Restore() self._apb_stub.Restore() def testNoAdbReturnsNone(self): finder_options", "( 'List of devices attached\\n????????????\\tno permissions\\n', '* daemon not running.", "= ( ['015d14fec128220c']) device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('015d14fec128220c', device.device_id)", "the LICENSE file. import unittest from telemetry import benchmark from", "# found in the LICENSE file. 
import unittest from telemetry", "Consider running adb as root:', ' adb kill-server', ' sudo", "following:\\n' ' --device=015d14fec128220c\\n' ' --device=015d14fec128220d'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options", "[ '015d14fec128220c', '015d14fec128220d'] device = android_device.GetDevice(finder_options) self.assertEquals([ 'Multiple devices attached.", "= browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '015d14fec128220d'] device = android_device.GetDevice(finder_options)", "setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands', 'os', 'subprocess', 'logging']) self._apb_stub", "error. Consider running adb as root:', ' adb kill-server', '", "browser_options.BrowserFinderOptions() finder_options.android_device = '555d14fecddddddd' # pick one self._android_device_stub.adb_commands.attached_devices = [", "'015d14fec128220c', '555d14fecddddddd'] device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd', device.device_id) def", "running. 
starting it now on port 5037 *\\n' '* daemon", "= browser_options.BrowserFinderOptions() finder_options.android_device = '555d14fecddddddd' # pick one self._android_device_stub.adb_commands.attached_devices =", "self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd', device.device_id) def testAdbOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices", "AndroidDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands']) def testGetAllAttachedAndroidDevices(self):", "device in android_device.AndroidDevice.GetAllConnectedDevices() )) def tearDown(self): self._android_device_stub.Restore() class GetDeviceTest(unittest.TestCase): def", "import android_platform_backend from telemetry.unittest_util import system_stub class AndroidDeviceTest(unittest.TestCase): def setUp(self):", "= NoAdb self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbNoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions()", "of the following:\\n' ' --device=015d14fec128220c\\n' ' --device=015d14fec128220d'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def", "NoAdb(*_, **__): raise OSError('not found') self._android_device_stub.subprocess.Popen = NoAdb self.assertEquals([], self._android_device_stub.logging.warnings)", "on port 5037 *\\n' '* daemon started successfully *\\n') device", "device = android_device.GetDevice(finder_options) self.assertEquals([ 'adb devices gave a permissions error.", "one self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '555d14fecddddddd'] device = android_device.GetDevice(finder_options) self.assertEquals([],", "devices attached\\n????????????\\tno permissions\\n', '* daemon not running. 
starting it now", "source code is governed by a BSD-style license that can", "system_stub.Override( android_device, ['adb_commands']) def testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices = [ '01', '02']", "Authors. All rights reserved. # Use of this source code", "['adb_commands']) def testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices = [ '01', '02'] self.assertEquals( set(['01',", "[ '01', '02'] self.assertEquals( set(['01', '02']), set(device.device_id for device in", "the following:\\n' ' --device=015d14fec128220c\\n' ' --device=015d14fec128220d'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbPickOneDeviceReturnsDeviceInstance(self):", "browser_options.BrowserFinderOptions() self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbPermissionsErrorReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result", "= android_device.GetDevice(finder_options) self.assertEquals([ 'Multiple devices attached. Please specify one of", "= [ '01', '02'] self.assertEquals( set(['01', '02']), set(device.device_id for device", "adb as root:', ' adb kill-server', ' sudo `which adb`", "telemetry.core import browser_options from telemetry.core.platform import android_device from telemetry.core.platform import", "gave a permissions error. Consider running adb as root:', '", "found') self._android_device_stub.subprocess.Popen = NoAdb self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbNoDevicesReturnsNone(self): finder_options", "'Multiple devices attached. 
Please specify one of the following:\\n' '", "set(['01', '02']), set(device.device_id for device in android_device.AndroidDevice.GetAllConnectedDevices() )) def tearDown(self):", "from telemetry.core.platform import android_device from telemetry.core.platform import android_platform_backend from telemetry.unittest_util", "'* daemon not running. starting it now on port 5037", "it now on port 5037 *\\n' '* daemon started successfully", "can be # found in the LICENSE file. import unittest" ]
[ "'' self.logger = logging.getLogger(); self.logger.setLevel(logging.INFO) filename = LOG_PATH + str(filename)", "%(message)s', datefmt='%d-%m %H:%M' ) self.handler.setFormatter(formatter) self.logger.addHandler(self.handler) return #------------------------------------ def add(self,", "A ready to use logging class. All you need to", "self.handler = None LOG_PATH = log_dir assert type(filename) == str", "#------------------------------------ def add(self, message): assert type(message) == str self.logger.info(message); return", "use logging class. All you need to do is set", "import logging #--------------------------------------- class logger : ''' A ready to", "coverage. You can edit any of the below configuration to", "''' A ready to use logging class. All you need", "important, I just added it for coverage. You can edit", "logging #--------------------------------------- class logger : ''' A ready to use", "configuration to whatever you like. ''' def __init__(self, filename, log_dir='../data/log'):", "str(filename) self.handler = logging.FileHandler(filename) self.handler.setLevel(logging.INFO) formatter = logging.Formatter( fmt='%(asctime)s :", "to use logging class. All you need to do is", "logging.Formatter( fmt='%(asctime)s : %(message)s', datefmt='%d-%m %H:%M' ) self.handler.setFormatter(formatter) self.logger.addHandler(self.handler) return", "= logging.Formatter( fmt='%(asctime)s : %(message)s', datefmt='%d-%m %H:%M' ) self.handler.setFormatter(formatter) self.logger.addHandler(self.handler)", "for coverage. You can edit any of the below configuration", "str or filename != '' self.logger = logging.getLogger(); self.logger.setLevel(logging.INFO) filename", "add text, type obj.add(\"some text\"). 
The function obj.close() is not", "to do is set an object with the parameters (log_filename,", "LOG_PATH + str(filename) self.handler = logging.FileHandler(filename) self.handler.setLevel(logging.INFO) formatter = logging.Formatter(", "an object with the parameters (log_filename, directory to save it)", "added it for coverage. You can edit any of the", "to whatever you like. ''' def __init__(self, filename, log_dir='../data/log'): self.log", "whenever you want to add text, type obj.add(\"some text\"). The", "The function obj.close() is not important, I just added it", "text\"). The function obj.close() is not important, I just added", "just added it for coverage. You can edit any of", "= None LOG_PATH = log_dir assert type(filename) == str or", "filename != '' self.logger = logging.getLogger(); self.logger.setLevel(logging.INFO) filename = LOG_PATH", "with the parameters (log_filename, directory to save it) then whenever", "self.logger.setLevel(logging.INFO) filename = LOG_PATH + str(filename) self.handler = logging.FileHandler(filename) self.handler.setLevel(logging.INFO)", "logger : ''' A ready to use logging class. All", "to save it) then whenever you want to add text,", "set an object with the parameters (log_filename, directory to save", "= log_dir assert type(filename) == str or filename != ''", "ready to use logging class. All you need to do", "edit any of the below configuration to whatever you like.", "#! /usr/bin/env python import logging #--------------------------------------- class logger : '''", "class. 
All you need to do is set an object", "== str or filename != '' self.logger = logging.getLogger(); self.logger.setLevel(logging.INFO)", "= logging.getLogger(); self.logger.setLevel(logging.INFO) filename = LOG_PATH + str(filename) self.handler =", ") self.handler.setFormatter(formatter) self.logger.addHandler(self.handler) return #------------------------------------ def add(self, message): assert type(message)", "self.handler.setLevel(logging.INFO) formatter = logging.Formatter( fmt='%(asctime)s : %(message)s', datefmt='%d-%m %H:%M' )", "filename = LOG_PATH + str(filename) self.handler = logging.FileHandler(filename) self.handler.setLevel(logging.INFO) formatter", "you want to add text, type obj.add(\"some text\"). The function", ": %(message)s', datefmt='%d-%m %H:%M' ) self.handler.setFormatter(formatter) self.logger.addHandler(self.handler) return #------------------------------------ def", "parameters (log_filename, directory to save it) then whenever you want", "All you need to do is set an object with", "object with the parameters (log_filename, directory to save it) then", "#--------------------------------------- class logger : ''' A ready to use logging", "you need to do is set an object with the", "obj.add(\"some text\"). 
The function obj.close() is not important, I just", "or filename != '' self.logger = logging.getLogger(); self.logger.setLevel(logging.INFO) filename =", "self.logger.addHandler(self.handler) return #------------------------------------ def add(self, message): assert type(message) == str", "is set an object with the parameters (log_filename, directory to", "self.log = None self.handler = None LOG_PATH = log_dir assert", "log_dir assert type(filename) == str or filename != '' self.logger", "= logging.FileHandler(filename) self.handler.setLevel(logging.INFO) formatter = logging.Formatter( fmt='%(asctime)s : %(message)s', datefmt='%d-%m", "def add(self, message): assert type(message) == str self.logger.info(message); return #------------------------------------", "logging.getLogger(); self.logger.setLevel(logging.INFO) filename = LOG_PATH + str(filename) self.handler = logging.FileHandler(filename)", "None LOG_PATH = log_dir assert type(filename) == str or filename", "python import logging #--------------------------------------- class logger : ''' A ready", "log_dir='../data/log'): self.log = None self.handler = None LOG_PATH = log_dir", "is not important, I just added it for coverage. You", "add(self, message): assert type(message) == str self.logger.info(message); return #------------------------------------ def", "assert type(message) == str self.logger.info(message); return #------------------------------------ def close(self): self.logger.removeHandler(self.handler)", "obj.close() is not important, I just added it for coverage.", "of the below configuration to whatever you like. ''' def", "= LOG_PATH + str(filename) self.handler = logging.FileHandler(filename) self.handler.setLevel(logging.INFO) formatter =", "class logger : ''' A ready to use logging class.", "then whenever you want to add text, type obj.add(\"some text\").", "<reponame>bekaaa/xgboost_tuner<gh_stars>0 #! 
/usr/bin/env python import logging #--------------------------------------- class logger :", "(log_filename, directory to save it) then whenever you want to", "/usr/bin/env python import logging #--------------------------------------- class logger : ''' A", "want to add text, type obj.add(\"some text\"). The function obj.close()", "it) then whenever you want to add text, type obj.add(\"some", "to add text, type obj.add(\"some text\"). The function obj.close() is", "%H:%M' ) self.handler.setFormatter(formatter) self.logger.addHandler(self.handler) return #------------------------------------ def add(self, message): assert", "need to do is set an object with the parameters", "type(message) == str self.logger.info(message); return #------------------------------------ def close(self): self.logger.removeHandler(self.handler) return", "formatter = logging.Formatter( fmt='%(asctime)s : %(message)s', datefmt='%d-%m %H:%M' ) self.handler.setFormatter(formatter)", "do is set an object with the parameters (log_filename, directory", "whatever you like. ''' def __init__(self, filename, log_dir='../data/log'): self.log =", "self.handler.setFormatter(formatter) self.logger.addHandler(self.handler) return #------------------------------------ def add(self, message): assert type(message) ==", "+ str(filename) self.handler = logging.FileHandler(filename) self.handler.setLevel(logging.INFO) formatter = logging.Formatter( fmt='%(asctime)s", "datefmt='%d-%m %H:%M' ) self.handler.setFormatter(formatter) self.logger.addHandler(self.handler) return #------------------------------------ def add(self, message):", "like. 
''' def __init__(self, filename, log_dir='../data/log'): self.log = None self.handler", "can edit any of the below configuration to whatever you", "== str self.logger.info(message); return #------------------------------------ def close(self): self.logger.removeHandler(self.handler) return #----------------------------------------", "save it) then whenever you want to add text, type", "def __init__(self, filename, log_dir='../data/log'): self.log = None self.handler = None", "fmt='%(asctime)s : %(message)s', datefmt='%d-%m %H:%M' ) self.handler.setFormatter(formatter) self.logger.addHandler(self.handler) return #------------------------------------", "type obj.add(\"some text\"). The function obj.close() is not important, I", "assert type(filename) == str or filename != '' self.logger =", "type(filename) == str or filename != '' self.logger = logging.getLogger();", "logging class. All you need to do is set an", "''' def __init__(self, filename, log_dir='../data/log'): self.log = None self.handler =", "self.handler = logging.FileHandler(filename) self.handler.setLevel(logging.INFO) formatter = logging.Formatter( fmt='%(asctime)s : %(message)s',", "__init__(self, filename, log_dir='../data/log'): self.log = None self.handler = None LOG_PATH", "self.logger = logging.getLogger(); self.logger.setLevel(logging.INFO) filename = LOG_PATH + str(filename) self.handler", "return #------------------------------------ def add(self, message): assert type(message) == str self.logger.info(message);", "directory to save it) then whenever you want to add", "!= '' self.logger = logging.getLogger(); self.logger.setLevel(logging.INFO) filename = LOG_PATH +", "you like. ''' def __init__(self, filename, log_dir='../data/log'): self.log = None", "text, type obj.add(\"some text\"). The function obj.close() is not important,", "below configuration to whatever you like. ''' def __init__(self, filename,", "it for coverage. 
You can edit any of the below", "None self.handler = None LOG_PATH = log_dir assert type(filename) ==", "not important, I just added it for coverage. You can", "function obj.close() is not important, I just added it for", "I just added it for coverage. You can edit any", "the parameters (log_filename, directory to save it) then whenever you", ": ''' A ready to use logging class. All you", "You can edit any of the below configuration to whatever", "logging.FileHandler(filename) self.handler.setLevel(logging.INFO) formatter = logging.Formatter( fmt='%(asctime)s : %(message)s', datefmt='%d-%m %H:%M'", "LOG_PATH = log_dir assert type(filename) == str or filename !=", "any of the below configuration to whatever you like. '''", "the below configuration to whatever you like. ''' def __init__(self,", "message): assert type(message) == str self.logger.info(message); return #------------------------------------ def close(self):", "filename, log_dir='../data/log'): self.log = None self.handler = None LOG_PATH =", "= None self.handler = None LOG_PATH = log_dir assert type(filename)" ]
[ "TargetType(Enum): seq = auto() tree = auto() @staticmethod def from_string(s):", "str(total_examples)) if __name__ == '__main__': parser = ArgumentParser() parser.add_argument(\"-trd\", \"--train_data\",", "string. Order matters! [A-Z]+(?=[A-Z][a-z]) | # All upper case before", "obj['num_targets'] num_nodes = obj['num_nodes'] if max_targets is not None and", "= obj['num_nodes'] if max_targets is not None and num_targets >", "if not subtok == '_'] def subtokenize(s): failed = False", "max_nodes: return None, None if target_type is TargetType.seq: target_pred =", "parser.add_argument(\"-o\", \"--output_name\", dest=\"output_name\", help=\"output name - the base name for", "re.compile(r''' # Find words in a string. Order matters! [A-Z]+(?=[A-Z][a-z])", "list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1] except: tokens = s.split() failed", "return [subtok for subtok in RE_WORDS.findall(str) if not subtok ==", "def process_file(file_path, data_file_role, dataset_name, target_type, max_targets, max_nodes): total_examples = 0", "ValueError() target_type = TargetType.seq RE_WORDS = re.compile(r''' # Find words", "None or target_seq is None: continue source_output_file.write(source_seq + '\\n') target_output_file.write(target_seq", "= 'PRED' modifiers = ['public', 'private', 'protected', 'static'] class TargetType(Enum):", "target_pred def process_file(file_path, data_file_role, dataset_name, target_type, max_targets, max_nodes): total_examples =", "tokens if not i.value in modifiers] def subtokenize_tree(s): return '", "obj['left_context'] right_context = obj['right_context'] target_seq = obj['target_seq'] num_targets = obj['num_targets']", "= auto() @staticmethod def from_string(s): try: return TargetType[s] except KeyError:", "examples: if source_seq is None or target_seq is None: continue", "import partial PRED_TOKEN = 'PRED' modifiers = ['public', 'private', 'protected',", "obj['num_nodes'] if max_targets is not None and num_targets > max_targets:", 
"type=TargetType.from_string, choices=list(TargetType), required=True) parser.add_argument(\"--max_targets\", dest=\"max_targets\", type=int, required=False, default=40) parser.add_argument(\"--max_nodes\", dest=\"max_nodes\",", "= subtokenize_tree(obj['linearized_tree']) source = '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, '", "source_output_file.write(source_seq + '\\n') target_output_file.write(target_seq + '\\n') total_examples += 1 #print(source_seq,", "num_nodes = obj['num_nodes'] if max_targets is not None and num_targets", "max_targets, max_nodes): total_examples = 0 source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role)", "in tokens if not i in modifiers] else: return ['", "max_nodes, line) for line in file] for source_seq, target_seq in", "\\d+ | # Numbers _ | \\\" | .+ ''',", "data file\", required=True) parser.add_argument(\"-ted\", \"--test_data\", dest=\"test_data_path\", help=\"path to test data", "as target_output_file: with open(file_path, 'r') as file: subtokenize_line = partial(process_line,", "s.split() failed = True if failed: return [' _ '.join(split_subtokens(i))", "'{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role) target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role) with open(source_output_path,", "functools import partial PRED_TOKEN = 'PRED' modifiers = ['public', 'private',", "re.split(TREE_SPLIT, s) if len(sub) > 0]) def process_line(target_type, max_targets, max_nodes,", "val_data_path], ['train', 'test', 'val']): process_file(file_path=data_file_path, data_file_role=data_role, dataset_name=args.output_name, target_type=args.target_type, max_targets=args.max_targets, max_nodes=args.max_nodes)", "tokens if not i in modifiers] else: return [' _", "default=40) parser.add_argument(\"--max_nodes\", dest=\"max_nodes\", type=int, required=False, default=None) parser.add_argument('--local', 
action='store_true') args =", "target_seq) print('File: ' + file_path) print('Total examples: ' + str(total_examples))", "target_pred = ' '.join(subtokenize(target_seq)).lower() elif target_type is TargetType.tree: target_pred =", "help=\"path to test data file\", required=True) parser.add_argument(\"-vd\", \"--val_data\", dest=\"val_data_path\", help=\"path", "args.train_data_path test_data_path = args.test_data_path val_data_path = args.val_data_path for data_file_path, data_role", "case \\d+ | # Numbers _ | \\\" | .+", "pool: if data_file_role in ['test', 'val']: examples = [process_line(target_type, max_targets,", "in RE_WORDS.findall(str) if not subtok == '_'] def subtokenize(s): failed", "> max_nodes: return None, None if target_type is TargetType.seq: target_pred", "subtokenize(s): failed = False try: tokens = list(javalang.tokenizer.tokenize(s)) except: try:", "= re.compile(r'([(),])') def split_subtokens(str): return [subtok for subtok in RE_WORDS.findall(str)", "num_targets = obj['num_targets'] num_nodes = obj['num_nodes'] if max_targets is not", "All upper case \\d+ | # Numbers _ | \\\"", "\"--output_name\", dest=\"output_name\", help=\"output name - the base name for the", "return None, None if target_type is TargetType.seq: target_pred = '", "def subtokenize_tree(s): return ' '.join([sub for sub in re.split(TREE_SPLIT, s)", "data file\", required=True) parser.add_argument(\"-vd\", \"--val_data\", dest=\"val_data_path\", help=\"path to validation data", "if data_file_role in ['test', 'val']: examples = [process_line(target_type, max_targets, max_nodes,", "raise ValueError() target_type = TargetType.seq RE_WORDS = re.compile(r''' # Find", "is not None and num_nodes > max_nodes: return None, None", "or target_seq is None: continue source_output_file.write(source_seq + '\\n') target_output_file.write(target_seq +", "parser.add_argument(\"-trd\", \"--train_data\", dest=\"train_data_path\", help=\"path to training data file\", required=True) 
parser.add_argument(\"-ted\",", "examples: ' + str(total_examples)) if __name__ == '__main__': parser =", "from_string(s): try: return TargetType[s] except KeyError: raise ValueError() target_type =", "target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role) with open(source_output_path, 'w') as source_output_file:", "import multiprocessing as mp import re from argparse import ArgumentParser", "All upper case before a capitalized word [A-Z]?[a-z]+ | #", "is not None and num_targets > max_targets: return None, None", "left_context = obj['left_context'] right_context = obj['right_context'] target_seq = obj['target_seq'] num_targets", "else: return [' _ '.join(split_subtokens(i.value)) for i in tokens if", "import json import multiprocessing as mp import re from argparse", "#print(source_seq, target_seq) print('File: ' + file_path) print('Total examples: ' +", "__name__ == '__main__': parser = ArgumentParser() parser.add_argument(\"-trd\", \"--train_data\", dest=\"train_data_path\", help=\"path", "- the base name for the created dataset\", metavar=\"FILE\", required=True,", "tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2] except: try: tokens = list(javalang.tokenizer.tokenize('('", "help=\"path to validation data file\", required=True) parser.add_argument(\"-o\", \"--output_name\", dest=\"output_name\", help=\"output", "json.loads(line) left_context = obj['left_context'] right_context = obj['right_context'] target_seq = obj['target_seq']", "re from argparse import ArgumentParser from enum import Enum, auto", "= obj['num_targets'] num_nodes = obj['num_nodes'] if max_targets is not None", "required=False, default=40) parser.add_argument(\"--max_nodes\", dest=\"max_nodes\", type=int, required=False, default=None) parser.add_argument('--local', action='store_true') args", "max_nodes, line) for line in file] else: examples = pool.imap_unordered(subtokenize_line,", "val_data_path = args.val_data_path for data_file_path, 
data_role in zip([train_data_path, test_data_path, val_data_path],", "_ '.join(split_subtokens(i.value)) for i in tokens if not i.value in", "in tokens if not i.value in modifiers] def subtokenize_tree(s): return", "0]) def process_line(target_type, max_targets, max_nodes, line): obj = json.loads(line) left_context", "seq = auto() tree = auto() @staticmethod def from_string(s): try:", "except: try: tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2] except: try: tokens", "except KeyError: raise ValueError() target_type = TargetType.seq RE_WORDS = re.compile(r'''", "required=True) parser.add_argument(\"--max_targets\", dest=\"max_targets\", type=int, required=False, default=40) parser.add_argument(\"--max_nodes\", dest=\"max_nodes\", type=int, required=False,", "dataset\", metavar=\"FILE\", required=True, default='data') parser.add_argument(\"--target_type\", dest=\"target_type\", type=TargetType.from_string, choices=list(TargetType), required=True) parser.add_argument(\"--max_targets\",", "default='data') parser.add_argument(\"--target_type\", dest=\"target_type\", type=TargetType.from_string, choices=list(TargetType), required=True) parser.add_argument(\"--max_targets\", dest=\"max_targets\", type=int, required=False,", "words in a string. Order matters! 
[A-Z]+(?=[A-Z][a-z]) | # All", "tokens = s.split() failed = True if failed: return ['", "modifiers] def subtokenize_tree(s): return ' '.join([sub for sub in re.split(TREE_SPLIT,", "i in tokens if not i in modifiers] else: return", "'\\n') total_examples += 1 #print(source_seq, target_seq) print('File: ' + file_path)", "list(javalang.tokenizer.tokenize(s)) except: try: tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2] except: try:", "word [A-Z]?[a-z]+ | # Capitalized words / all lower case", "Numbers _ | \\\" | .+ ''', re.VERBOSE) TREE_SPLIT =", "= json.loads(line) left_context = obj['left_context'] right_context = obj['right_context'] target_seq =", "+= 1 #print(source_seq, target_seq) print('File: ' + file_path) print('Total examples:", "required=True) parser.add_argument(\"-o\", \"--output_name\", dest=\"output_name\", help=\"output name - the base name", "args.test_data_path val_data_path = args.val_data_path for data_file_path, data_role in zip([train_data_path, test_data_path,", "failed = False try: tokens = list(javalang.tokenizer.tokenize(s)) except: try: tokens", "['test', 'val']: examples = [process_line(target_type, max_targets, max_nodes, line) for line", "open(target_output_path, 'w') as target_output_file: with open(file_path, 'r') as file: subtokenize_line", "to training data file\", required=True) parser.add_argument(\"-ted\", \"--test_data\", dest=\"test_data_path\", help=\"path to", "class TargetType(Enum): seq = auto() tree = auto() @staticmethod def", "'.join(split_subtokens(i.value)) for i in tokens if not i.value in modifiers]", "'.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower()) return source, target_pred def process_file(file_path, data_file_role,", "failed: return [' _ '.join(split_subtokens(i)) for i in tokens if", "partial(process_line, target_type, max_targets, max_nodes) with mp.Pool(64) as pool: if data_file_role", "data_role in zip([train_data_path, test_data_path, 
val_data_path], ['train', 'test', 'val']): process_file(file_path=data_file_path, data_file_role=data_role,", "if len(sub) > 0]) def process_line(target_type, max_targets, max_nodes, line): obj", "# Find words in a string. Order matters! [A-Z]+(?=[A-Z][a-z]) |", "max_targets, max_nodes, line) for line in file] for source_seq, target_seq", "for line in file] else: examples = pool.imap_unordered(subtokenize_line, file, chunksize=100)", "target_type, data_file_role) with open(source_output_path, 'w') as source_output_file: with open(target_output_path, 'w')", "line): obj = json.loads(line) left_context = obj['left_context'] right_context = obj['right_context']", "'\\n') target_output_file.write(target_seq + '\\n') total_examples += 1 #print(source_seq, target_seq) print('File:", "'()'))[:-2] except: try: tokens = list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1]", "'.join(subtokenize(target_seq)).lower() elif target_type is TargetType.tree: target_pred = subtokenize_tree(obj['linearized_tree']) source =", "file_path) print('Total examples: ' + str(total_examples)) if __name__ == '__main__':", "target_type, max_targets, max_nodes) with mp.Pool(64) as pool: if data_file_role in", "target_output_file: with open(file_path, 'r') as file: subtokenize_line = partial(process_line, target_type,", "i in tokens if not i.value in modifiers] def subtokenize_tree(s):", "dest=\"target_type\", type=TargetType.from_string, choices=list(TargetType), required=True) parser.add_argument(\"--max_targets\", dest=\"max_targets\", type=int, required=False, default=40) parser.add_argument(\"--max_nodes\",", "'static'] class TargetType(Enum): seq = auto() tree = auto() @staticmethod", "1 #print(source_seq, target_seq) print('File: ' + file_path) print('Total examples: '", "| # Numbers _ | \\\" | .+ ''', re.VERBOSE)", "dest=\"test_data_path\", help=\"path to test data file\", required=True) parser.add_argument(\"-vd\", \"--val_data\", dest=\"val_data_path\",", "print('File: ' + file_path) 
print('Total examples: ' + str(total_examples)) if", "target_type, data_file_role) target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role) with open(source_output_path, 'w')", "s) if len(sub) > 0]) def process_line(target_type, max_targets, max_nodes, line):", "target_type, max_targets, max_nodes): total_examples = 0 source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type,", "> 0]) def process_line(target_type, max_targets, max_nodes, line): obj = json.loads(line)", "max_targets, max_nodes, line) for line in file] else: examples =", "to test data file\", required=True) parser.add_argument(\"-vd\", \"--val_data\", dest=\"val_data_path\", help=\"path to", "auto() tree = auto() @staticmethod def from_string(s): try: return TargetType[s]", "name - the base name for the created dataset\", metavar=\"FILE\",", "'{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role) with open(source_output_path, 'w') as source_output_file: with open(target_output_path,", "for sub in re.split(TREE_SPLIT, s) if len(sub) > 0]) def", "is TargetType.tree: target_pred = subtokenize_tree(obj['linearized_tree']) source = '{} {} {}'.format('", "= list(javalang.tokenizer.tokenize(s)) except: try: tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2] except:", "None if target_type is TargetType.seq: target_pred = ' '.join(subtokenize(target_seq)).lower() elif", "if max_nodes is not None and num_nodes > max_nodes: return", "data_file_path, data_role in zip([train_data_path, test_data_path, val_data_path], ['train', 'test', 'val']): process_file(file_path=data_file_path,", "subtokenize_line = partial(process_line, target_type, max_targets, max_nodes) with mp.Pool(64) as pool:", "# All upper case before a capitalized word [A-Z]?[a-z]+ |", "and num_targets > max_targets: return None, None if max_nodes is", "from functools import partial PRED_TOKEN = 'PRED' modifiers = ['public',", "as mp import re from argparse import 
ArgumentParser from enum", "for i in tokens if not i.value in modifiers] def", "subtokenize_tree(s): return ' '.join([sub for sub in re.split(TREE_SPLIT, s) if", "in examples: if source_seq is None or target_seq is None:", "max_targets: return None, None if max_nodes is not None and", "as pool: if data_file_role in ['test', 'val']: examples = [process_line(target_type,", "= args.test_data_path val_data_path = args.val_data_path for data_file_path, data_role in zip([train_data_path,", "except: tokens = s.split() failed = True if failed: return", "test_data_path = args.test_data_path val_data_path = args.val_data_path for data_file_path, data_role in", "= parser.parse_args() train_data_path = args.train_data_path test_data_path = args.test_data_path val_data_path =", "TargetType[s] except KeyError: raise ValueError() target_type = TargetType.seq RE_WORDS =", "target_seq in examples: if source_seq is None or target_seq is", "== '__main__': parser = ArgumentParser() parser.add_argument(\"-trd\", \"--train_data\", dest=\"train_data_path\", help=\"path to", "file\", required=True) parser.add_argument(\"-ted\", \"--test_data\", dest=\"test_data_path\", help=\"path to test data file\",", "+ str(total_examples)) if __name__ == '__main__': parser = ArgumentParser() parser.add_argument(\"-trd\",", "= obj['target_seq'] num_targets = obj['num_targets'] num_nodes = obj['num_nodes'] if max_targets", "dest=\"train_data_path\", help=\"path to training data file\", required=True) parser.add_argument(\"-ted\", \"--test_data\", dest=\"test_data_path\",", "= list(javalang.tokenizer.tokenize(s + '()'))[:-2] except: try: tokens = list(javalang.tokenizer.tokenize('(' +", "parser.add_argument(\"--max_nodes\", dest=\"max_nodes\", type=int, required=False, default=None) parser.add_argument('--local', action='store_true') args = parser.parse_args()", "subtok == '_'] def subtokenize(s): failed = False try: tokens", "' '.join(subtokenize(right_context)[:200]).lower()) return source, target_pred 
def process_file(file_path, data_file_role, dataset_name, target_type,", "if failed: return [' _ '.join(split_subtokens(i)) for i in tokens", "with open(file_path, 'r') as file: subtokenize_line = partial(process_line, target_type, max_targets,", "json import multiprocessing as mp import re from argparse import", "True if failed: return [' _ '.join(split_subtokens(i)) for i in", "argparse import ArgumentParser from enum import Enum, auto import javalang", "not None and num_nodes > max_nodes: return None, None if", "+ s + ')'))[1:-1] except: tokens = s.split() failed =", "'__main__': parser = ArgumentParser() parser.add_argument(\"-trd\", \"--train_data\", dest=\"train_data_path\", help=\"path to training", "auto() @staticmethod def from_string(s): try: return TargetType[s] except KeyError: raise", "None: continue source_output_file.write(source_seq + '\\n') target_output_file.write(target_seq + '\\n') total_examples +=", "for data_file_path, data_role in zip([train_data_path, test_data_path, val_data_path], ['train', 'test', 'val']):", "target_seq is None: continue source_output_file.write(source_seq + '\\n') target_output_file.write(target_seq + '\\n')", "import re from argparse import ArgumentParser from enum import Enum,", "action='store_true') args = parser.parse_args() train_data_path = args.train_data_path test_data_path = args.test_data_path", "= False try: tokens = list(javalang.tokenizer.tokenize(s)) except: try: tokens =", "in file] for source_seq, target_seq in examples: if source_seq is", "obj = json.loads(line) left_context = obj['left_context'] right_context = obj['right_context'] target_seq", "TargetType.seq RE_WORDS = re.compile(r''' # Find words in a string.", "not i in modifiers] else: return [' _ '.join(split_subtokens(i.value)) for", "data_file_role) target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role) with open(source_output_path, 'w') as", "list(javalang.tokenizer.tokenize(s + '()'))[:-2] except: try: 
tokens = list(javalang.tokenizer.tokenize('(' + s", "try: return TargetType[s] except KeyError: raise ValueError() target_type = TargetType.seq", "None, None if max_nodes is not None and num_nodes >", "\"--val_data\", dest=\"val_data_path\", help=\"path to validation data file\", required=True) parser.add_argument(\"-o\", \"--output_name\",", "before a capitalized word [A-Z]?[a-z]+ | # Capitalized words /", "required=True) parser.add_argument(\"-ted\", \"--test_data\", dest=\"test_data_path\", help=\"path to test data file\", required=True)", "split_subtokens(str): return [subtok for subtok in RE_WORDS.findall(str) if not subtok", "file] for source_seq, target_seq in examples: if source_seq is None", "= s.split() failed = True if failed: return [' _", "re.compile(r'([(),])') def split_subtokens(str): return [subtok for subtok in RE_WORDS.findall(str) if", "def subtokenize(s): failed = False try: tokens = list(javalang.tokenizer.tokenize(s)) except:", "PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower()) return source, target_pred def process_file(file_path, data_file_role, dataset_name,", "examples = [process_line(target_type, max_targets, max_nodes, line) for line in file]", "type=int, required=False, default=40) parser.add_argument(\"--max_nodes\", dest=\"max_nodes\", type=int, required=False, default=None) parser.add_argument('--local', action='store_true')", "open(file_path, 'r') as file: subtokenize_line = partial(process_line, target_type, max_targets, max_nodes)", "with open(target_output_path, 'w') as target_output_file: with open(file_path, 'r') as file:", "['public', 'private', 'protected', 'static'] class TargetType(Enum): seq = auto() tree", "parser.add_argument(\"--max_targets\", dest=\"max_targets\", type=int, required=False, default=40) parser.add_argument(\"--max_nodes\", dest=\"max_nodes\", type=int, required=False, default=None)", "re.VERBOSE) TREE_SPLIT = re.compile(r'([(),])') def split_subtokens(str): return [subtok for subtok", 
"ArgumentParser from enum import Enum, auto import javalang from functools", "+ '\\n') total_examples += 1 #print(source_seq, target_seq) print('File: ' +", "return TargetType[s] except KeyError: raise ValueError() target_type = TargetType.seq RE_WORDS", "parser.parse_args() train_data_path = args.train_data_path test_data_path = args.test_data_path val_data_path = args.val_data_path", "[' _ '.join(split_subtokens(i)) for i in tokens if not i", "None if max_nodes is not None and num_nodes > max_nodes:", "dest=\"val_data_path\", help=\"path to validation data file\", required=True) parser.add_argument(\"-o\", \"--output_name\", dest=\"output_name\",", "0 source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role) target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type,", "in ['test', 'val']: examples = [process_line(target_type, max_targets, max_nodes, line) for", "'private', 'protected', 'static'] class TargetType(Enum): seq = auto() tree =", "RE_WORDS.findall(str) if not subtok == '_'] def subtokenize(s): failed =", "choices=list(TargetType), required=True) parser.add_argument(\"--max_targets\", dest=\"max_targets\", type=int, required=False, default=40) parser.add_argument(\"--max_nodes\", dest=\"max_nodes\", type=int,", "all lower case [A-Z]+ | # All upper case \\d+", "is None or target_seq is None: continue source_output_file.write(source_seq + '\\n')", "right_context = obj['right_context'] target_seq = obj['target_seq'] num_targets = obj['num_targets'] num_nodes", "{}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower()) return source, target_pred def process_file(file_path,", "if source_seq is None or target_seq is None: continue source_output_file.write(source_seq", "i.value in modifiers] def subtokenize_tree(s): return ' '.join([sub for sub", "help=\"path to training data file\", required=True) parser.add_argument(\"-ted\", 
\"--test_data\", dest=\"test_data_path\", help=\"path", "from enum import Enum, auto import javalang from functools import", "parser.add_argument(\"-ted\", \"--test_data\", dest=\"test_data_path\", help=\"path to test data file\", required=True) parser.add_argument(\"-vd\",", "matters! [A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized", "# Numbers _ | \\\" | .+ ''', re.VERBOSE) TREE_SPLIT", "PRED_TOKEN = 'PRED' modifiers = ['public', 'private', 'protected', 'static'] class", "total_examples = 0 source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role) target_output_path =", "'.join([sub for sub in re.split(TREE_SPLIT, s) if len(sub) > 0])", "metavar=\"FILE\", required=True, default='data') parser.add_argument(\"--target_type\", dest=\"target_type\", type=TargetType.from_string, choices=list(TargetType), required=True) parser.add_argument(\"--max_targets\", dest=\"max_targets\",", "with mp.Pool(64) as pool: if data_file_role in ['test', 'val']: examples", "'w') as source_output_file: with open(target_output_path, 'w') as target_output_file: with open(file_path,", "in modifiers] else: return [' _ '.join(split_subtokens(i.value)) for i in", "'w') as target_output_file: with open(file_path, 'r') as file: subtokenize_line =", "args = parser.parse_args() train_data_path = args.train_data_path test_data_path = args.test_data_path val_data_path", "+ file_path) print('Total examples: ' + str(total_examples)) if __name__ ==", "created dataset\", metavar=\"FILE\", required=True, default='data') parser.add_argument(\"--target_type\", dest=\"target_type\", type=TargetType.from_string, choices=list(TargetType), required=True)", "examples = pool.imap_unordered(subtokenize_line, file, chunksize=100) #examples = [process_line(target_type, max_targets, max_nodes,", "False try: tokens = list(javalang.tokenizer.tokenize(s)) except: try: tokens = list(javalang.tokenizer.tokenize(s", "validation data file\", required=True) 
parser.add_argument(\"-o\", \"--output_name\", dest=\"output_name\", help=\"output name -", "[A-Z]+ | # All upper case \\d+ | # Numbers", "= '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role) with open(source_output_path, 'w') as source_output_file: with", "file] else: examples = pool.imap_unordered(subtokenize_line, file, chunksize=100) #examples = [process_line(target_type,", "obj['right_context'] target_seq = obj['target_seq'] num_targets = obj['num_targets'] num_nodes = obj['num_nodes']", "with open(source_output_path, 'w') as source_output_file: with open(target_output_path, 'w') as target_output_file:", "== '_'] def subtokenize(s): failed = False try: tokens =", "[process_line(target_type, max_targets, max_nodes, line) for line in file] for source_seq,", "None, None if target_type is TargetType.seq: target_pred = ' '.join(subtokenize(target_seq)).lower()", "default=None) parser.add_argument('--local', action='store_true') args = parser.parse_args() train_data_path = args.train_data_path test_data_path", "target_type is TargetType.seq: target_pred = ' '.join(subtokenize(target_seq)).lower() elif target_type is", "source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role) target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role)", "s + ')'))[1:-1] except: tokens = s.split() failed = True", "num_targets > max_targets: return None, None if max_nodes is not", "required=True) parser.add_argument(\"-vd\", \"--val_data\", dest=\"val_data_path\", help=\"path to validation data file\", required=True)", "'.join(split_subtokens(i)) for i in tokens if not i in modifiers]", "return [' _ '.join(split_subtokens(i)) for i in tokens if not", "multiprocessing as mp import re from argparse import ArgumentParser from", "dest=\"max_nodes\", type=int, required=False, default=None) parser.add_argument('--local', action='store_true') args = parser.parse_args() train_data_path", 
"process_line(target_type, max_targets, max_nodes, line): obj = json.loads(line) left_context = obj['left_context']", "' '.join([sub for sub in re.split(TREE_SPLIT, s) if len(sub) >", "max_targets is not None and num_targets > max_targets: return None,", "except: try: tokens = list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1] except:", "max_nodes is not None and num_nodes > max_nodes: return None,", "'{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower()) return source, target_pred", "[process_line(target_type, max_targets, max_nodes, line) for line in file] else: examples", "= [process_line(target_type, max_targets, max_nodes, line) for line in file] for", "Order matters! [A-Z]+(?=[A-Z][a-z]) | # All upper case before a", "base name for the created dataset\", metavar=\"FILE\", required=True, default='data') parser.add_argument(\"--target_type\",", "''', re.VERBOSE) TREE_SPLIT = re.compile(r'([(),])') def split_subtokens(str): return [subtok for", "type=int, required=False, default=None) parser.add_argument('--local', action='store_true') args = parser.parse_args() train_data_path =", "a string. Order matters! 
[A-Z]+(?=[A-Z][a-z]) | # All upper case", "lower case [A-Z]+ | # All upper case \\d+ |", "Capitalized words / all lower case [A-Z]+ | # All", "line) for line in file] else: examples = pool.imap_unordered(subtokenize_line, file,", "parser.add_argument(\"--target_type\", dest=\"target_type\", type=TargetType.from_string, choices=list(TargetType), required=True) parser.add_argument(\"--max_targets\", dest=\"max_targets\", type=int, required=False, default=40)", "for source_seq, target_seq in examples: if source_seq is None or", "source_seq is None or target_seq is None: continue source_output_file.write(source_seq +", "for line in file] for source_seq, target_seq in examples: if", "for subtok in RE_WORDS.findall(str) if not subtok == '_'] def", "target_seq = obj['target_seq'] num_targets = obj['num_targets'] num_nodes = obj['num_nodes'] if", "for i in tokens if not i in modifiers] else:", "in a string. Order matters! [A-Z]+(?=[A-Z][a-z]) | # All upper", "import ArgumentParser from enum import Enum, auto import javalang from", "= '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower()) return source,", "upper case before a capitalized word [A-Z]?[a-z]+ | # Capitalized", "target_type = TargetType.seq RE_WORDS = re.compile(r''' # Find words in", "not i.value in modifiers] def subtokenize_tree(s): return ' '.join([sub for", "is None: continue source_output_file.write(source_seq + '\\n') target_output_file.write(target_seq + '\\n') total_examples", "as source_output_file: with open(target_output_path, 'w') as target_output_file: with open(file_path, 'r')", "source, target_pred def process_file(file_path, data_file_role, dataset_name, target_type, max_targets, max_nodes): total_examples", "data file\", required=True) parser.add_argument(\"-o\", \"--output_name\", dest=\"output_name\", help=\"output name - the", "| # Capitalized words / all lower case [A-Z]+ |", "and num_nodes > max_nodes: return 
None, None if target_type is", "| # All upper case \\d+ | # Numbers _", "partial PRED_TOKEN = 'PRED' modifiers = ['public', 'private', 'protected', 'static']", "file\", required=True) parser.add_argument(\"-vd\", \"--val_data\", dest=\"val_data_path\", help=\"path to validation data file\",", "test_data_path, val_data_path], ['train', 'test', 'val']): process_file(file_path=data_file_path, data_file_role=data_role, dataset_name=args.output_name, target_type=args.target_type, max_targets=args.max_targets,", "required=False, default=None) parser.add_argument('--local', action='store_true') args = parser.parse_args() train_data_path = args.train_data_path", "_ '.join(split_subtokens(i)) for i in tokens if not i in", "def split_subtokens(str): return [subtok for subtok in RE_WORDS.findall(str) if not", "@staticmethod def from_string(s): try: return TargetType[s] except KeyError: raise ValueError()", "= ' '.join(subtokenize(target_seq)).lower() elif target_type is TargetType.tree: target_pred = subtokenize_tree(obj['linearized_tree'])", "\"--train_data\", dest=\"train_data_path\", help=\"path to training data file\", required=True) parser.add_argument(\"-ted\", \"--test_data\",", "Find words in a string. Order matters! 
[A-Z]+(?=[A-Z][a-z]) | #", "if target_type is TargetType.seq: target_pred = ' '.join(subtokenize(target_seq)).lower() elif target_type", "chunksize=100) #examples = [process_line(target_type, max_targets, max_nodes, line) for line in", "file: subtokenize_line = partial(process_line, target_type, max_targets, max_nodes) with mp.Pool(64) as", "not None and num_targets > max_targets: return None, None if", "capitalized word [A-Z]?[a-z]+ | # Capitalized words / all lower", "\"--test_data\", dest=\"test_data_path\", help=\"path to test data file\", required=True) parser.add_argument(\"-vd\", \"--val_data\",", "modifiers] else: return [' _ '.join(split_subtokens(i.value)) for i in tokens", "= auto() tree = auto() @staticmethod def from_string(s): try: return", "print('Total examples: ' + str(total_examples)) if __name__ == '__main__': parser", "KeyError: raise ValueError() target_type = TargetType.seq RE_WORDS = re.compile(r''' #", "case [A-Z]+ | # All upper case \\d+ | #", "if __name__ == '__main__': parser = ArgumentParser() parser.add_argument(\"-trd\", \"--train_data\", dest=\"train_data_path\",", "tokens = list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1] except: tokens =", "i in modifiers] else: return [' _ '.join(split_subtokens(i.value)) for i", "training data file\", required=True) parser.add_argument(\"-ted\", \"--test_data\", dest=\"test_data_path\", help=\"path to test", "+ '()'))[:-2] except: try: tokens = list(javalang.tokenizer.tokenize('(' + s +", "obj['target_seq'] num_targets = obj['num_targets'] num_nodes = obj['num_nodes'] if max_targets is", "help=\"output name - the base name for the created dataset\",", "[A-Z]?[a-z]+ | # Capitalized words / all lower case [A-Z]+", "source_seq, target_seq in examples: if source_seq is None or target_seq", "dest=\"output_name\", help=\"output name - the base name for the created", "if max_targets is not None and num_targets > max_targets: return", "enum import Enum, auto import javalang from functools import 
partial", "_ | \\\" | .+ ''', re.VERBOSE) TREE_SPLIT = re.compile(r'([(),])')", "try: tokens = list(javalang.tokenizer.tokenize(s)) except: try: tokens = list(javalang.tokenizer.tokenize(s +", "zip([train_data_path, test_data_path, val_data_path], ['train', 'test', 'val']): process_file(file_path=data_file_path, data_file_role=data_role, dataset_name=args.output_name, target_type=args.target_type,", "'PRED' modifiers = ['public', 'private', 'protected', 'static'] class TargetType(Enum): seq", "= list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1] except: tokens = s.split()", "failed = True if failed: return [' _ '.join(split_subtokens(i)) for", "if not i.value in modifiers] def subtokenize_tree(s): return ' '.join([sub", "' '.join(subtokenize(target_seq)).lower() elif target_type is TargetType.tree: target_pred = subtokenize_tree(obj['linearized_tree']) source", "TargetType.tree: target_pred = subtokenize_tree(obj['linearized_tree']) source = '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(),", "#examples = [process_line(target_type, max_targets, max_nodes, line) for line in file]", "')'))[1:-1] except: tokens = s.split() failed = True if failed:", "None and num_nodes > max_nodes: return None, None if target_type", "source = '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower()) return", "open(source_output_path, 'w') as source_output_file: with open(target_output_path, 'w') as target_output_file: with", "total_examples += 1 #print(source_seq, target_seq) print('File: ' + file_path) print('Total", "javalang from functools import partial PRED_TOKEN = 'PRED' modifiers =", "max_nodes, line): obj = json.loads(line) left_context = obj['left_context'] right_context =", "tree = auto() @staticmethod def from_string(s): try: return TargetType[s] except", "Enum, auto import javalang from functools import partial PRED_TOKEN =", "\\\" | .+ ''', re.VERBOSE) TREE_SPLIT = 
re.compile(r'([(),])') def split_subtokens(str):", "in re.split(TREE_SPLIT, s) if len(sub) > 0]) def process_line(target_type, max_targets,", "def process_line(target_type, max_targets, max_nodes, line): obj = json.loads(line) left_context =", "= 0 source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role) target_output_path = '{}.{}.{}.target.txt'.format(dataset_name,", "name for the created dataset\", metavar=\"FILE\", required=True, default='data') parser.add_argument(\"--target_type\", dest=\"target_type\",", "[' _ '.join(split_subtokens(i.value)) for i in tokens if not i.value", "data_file_role, dataset_name, target_type, max_targets, max_nodes): total_examples = 0 source_output_path =", "file\", required=True) parser.add_argument(\"-o\", \"--output_name\", dest=\"output_name\", help=\"output name - the base", "line in file] else: examples = pool.imap_unordered(subtokenize_line, file, chunksize=100) #examples", "= args.val_data_path for data_file_path, data_role in zip([train_data_path, test_data_path, val_data_path], ['train',", "args.val_data_path for data_file_path, data_role in zip([train_data_path, test_data_path, val_data_path], ['train', 'test',", "max_nodes) with mp.Pool(64) as pool: if data_file_role in ['test', 'val']:", "'val']: examples = [process_line(target_type, max_targets, max_nodes, line) for line in", "line) for line in file] for source_seq, target_seq in examples:", "import javalang from functools import partial PRED_TOKEN = 'PRED' modifiers", "return ' '.join([sub for sub in re.split(TREE_SPLIT, s) if len(sub)", "parser.add_argument('--local', action='store_true') args = parser.parse_args() train_data_path = args.train_data_path test_data_path =", "line in file] for source_seq, target_seq in examples: if source_seq", "mp import re from argparse import ArgumentParser from enum import", "'protected', 'static'] class TargetType(Enum): seq = auto() tree = auto()", "the base name for the created dataset\", 
metavar=\"FILE\", required=True, default='data')", "' + file_path) print('Total examples: ' + str(total_examples)) if __name__", "num_nodes > max_nodes: return None, None if target_type is TargetType.seq:", "dataset_name, target_type, max_targets, max_nodes): total_examples = 0 source_output_path = '{}.{}.{}.source.txt'.format(dataset_name,", "= True if failed: return [' _ '.join(split_subtokens(i)) for i", "def from_string(s): try: return TargetType[s] except KeyError: raise ValueError() target_type", "elif target_type is TargetType.tree: target_pred = subtokenize_tree(obj['linearized_tree']) source = '{}", "= args.train_data_path test_data_path = args.test_data_path val_data_path = args.val_data_path for data_file_path,", "import Enum, auto import javalang from functools import partial PRED_TOKEN", "| # All upper case before a capitalized word [A-Z]?[a-z]+", "TargetType.seq: target_pred = ' '.join(subtokenize(target_seq)).lower() elif target_type is TargetType.tree: target_pred", "else: examples = pool.imap_unordered(subtokenize_line, file, chunksize=100) #examples = [process_line(target_type, max_targets,", "upper case \\d+ | # Numbers _ | \\\" |", "case before a capitalized word [A-Z]?[a-z]+ | # Capitalized words", "target_pred = subtokenize_tree(obj['linearized_tree']) source = '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN,", "| .+ ''', re.VERBOSE) TREE_SPLIT = re.compile(r'([(),])') def split_subtokens(str): return", "subtok in RE_WORDS.findall(str) if not subtok == '_'] def subtokenize(s):", "the created dataset\", metavar=\"FILE\", required=True, default='data') parser.add_argument(\"--target_type\", dest=\"target_type\", type=TargetType.from_string, choices=list(TargetType),", "words / all lower case [A-Z]+ | # All upper", "return source, target_pred def process_file(file_path, data_file_role, dataset_name, target_type, max_targets, max_nodes):", "= pool.imap_unordered(subtokenize_line, file, chunksize=100) #examples = 
[process_line(target_type, max_targets, max_nodes, line)", "parser.add_argument(\"-vd\", \"--val_data\", dest=\"val_data_path\", help=\"path to validation data file\", required=True) parser.add_argument(\"-o\",", "# All upper case \\d+ | # Numbers _ |", "target_type is TargetType.tree: target_pred = subtokenize_tree(obj['linearized_tree']) source = '{} {}", "to validation data file\", required=True) parser.add_argument(\"-o\", \"--output_name\", dest=\"output_name\", help=\"output name", "parser = ArgumentParser() parser.add_argument(\"-trd\", \"--train_data\", dest=\"train_data_path\", help=\"path to training data", "= obj['right_context'] target_seq = obj['target_seq'] num_targets = obj['num_targets'] num_nodes =", "ArgumentParser() parser.add_argument(\"-trd\", \"--train_data\", dest=\"train_data_path\", help=\"path to training data file\", required=True)", "from argparse import ArgumentParser from enum import Enum, auto import", "[subtok for subtok in RE_WORDS.findall(str) if not subtok == '_']", "pool.imap_unordered(subtokenize_line, file, chunksize=100) #examples = [process_line(target_type, max_targets, max_nodes, line) for", "+ ')'))[1:-1] except: tokens = s.split() failed = True if", "try: tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2] except: try: tokens =", "for the created dataset\", metavar=\"FILE\", required=True, default='data') parser.add_argument(\"--target_type\", dest=\"target_type\", type=TargetType.from_string,", "tokens = list(javalang.tokenizer.tokenize(s)) except: try: tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2]", "= re.compile(r''' # Find words in a string. 
Order matters!", "= obj['left_context'] right_context = obj['right_context'] target_seq = obj['target_seq'] num_targets =", "data_file_role) with open(source_output_path, 'w') as source_output_file: with open(target_output_path, 'w') as", "+ '\\n') target_output_file.write(target_seq + '\\n') total_examples += 1 #print(source_seq, target_seq)", "auto import javalang from functools import partial PRED_TOKEN = 'PRED'", "= ['public', 'private', 'protected', 'static'] class TargetType(Enum): seq = auto()", "dest=\"max_targets\", type=int, required=False, default=40) parser.add_argument(\"--max_nodes\", dest=\"max_nodes\", type=int, required=False, default=None) parser.add_argument('--local',", "in zip([train_data_path, test_data_path, val_data_path], ['train', 'test', 'val']): process_file(file_path=data_file_path, data_file_role=data_role, dataset_name=args.output_name,", "sub in re.split(TREE_SPLIT, s) if len(sub) > 0]) def process_line(target_type,", "None and num_targets > max_targets: return None, None if max_nodes", "/ all lower case [A-Z]+ | # All upper case", "required=True, default='data') parser.add_argument(\"--target_type\", dest=\"target_type\", type=TargetType.from_string, choices=list(TargetType), required=True) parser.add_argument(\"--max_targets\", dest=\"max_targets\", type=int,", "if not i in modifiers] else: return [' _ '.join(split_subtokens(i.value))", "data_file_role in ['test', 'val']: examples = [process_line(target_type, max_targets, max_nodes, line)", ".+ ''', re.VERBOSE) TREE_SPLIT = re.compile(r'([(),])') def split_subtokens(str): return [subtok", "try: tokens = list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1] except: tokens", "test data file\", required=True) parser.add_argument(\"-vd\", \"--val_data\", dest=\"val_data_path\", help=\"path to validation", "> max_targets: return None, None if max_nodes is not None", "| \\\" | .+ ''', re.VERBOSE) TREE_SPLIT = re.compile(r'([(),])') def", "{} {}'.format(' 
'.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower()) return source, target_pred def", "max_targets, max_nodes) with mp.Pool(64) as pool: if data_file_role in ['test',", "RE_WORDS = re.compile(r''' # Find words in a string. Order", "subtokenize_tree(obj['linearized_tree']) source = '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower())", "process_file(file_path, data_file_role, dataset_name, target_type, max_targets, max_nodes): total_examples = 0 source_output_path", "= '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role) target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role) with", "a capitalized word [A-Z]?[a-z]+ | # Capitalized words / all", "source_output_file: with open(target_output_path, 'w') as target_output_file: with open(file_path, 'r') as", "# Capitalized words / all lower case [A-Z]+ | #", "[A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized word", "max_targets, max_nodes, line): obj = json.loads(line) left_context = obj['left_context'] right_context", "'r') as file: subtokenize_line = partial(process_line, target_type, max_targets, max_nodes) with", "mp.Pool(64) as pool: if data_file_role in ['test', 'val']: examples =", "= TargetType.seq RE_WORDS = re.compile(r''' # Find words in a", "file, chunksize=100) #examples = [process_line(target_type, max_targets, max_nodes, line) for line", "not subtok == '_'] def subtokenize(s): failed = False try:", "train_data_path = args.train_data_path test_data_path = args.test_data_path val_data_path = args.val_data_path for", "len(sub) > 0]) def process_line(target_type, max_targets, max_nodes, line): obj =", "max_nodes): total_examples = 0 source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role) target_output_path", "= partial(process_line, target_type, max_targets, max_nodes) 
with mp.Pool(64) as pool: if", "'.join(subtokenize(right_context)[:200]).lower()) return source, target_pred def process_file(file_path, data_file_role, dataset_name, target_type, max_targets,", "return None, None if max_nodes is not None and num_nodes", "return [' _ '.join(split_subtokens(i.value)) for i in tokens if not", "= [process_line(target_type, max_targets, max_nodes, line) for line in file] else:", "'_'] def subtokenize(s): failed = False try: tokens = list(javalang.tokenizer.tokenize(s))", "in file] else: examples = pool.imap_unordered(subtokenize_line, file, chunksize=100) #examples =", "is TargetType.seq: target_pred = ' '.join(subtokenize(target_seq)).lower() elif target_type is TargetType.tree:", "continue source_output_file.write(source_seq + '\\n') target_output_file.write(target_seq + '\\n') total_examples += 1", "' + str(total_examples)) if __name__ == '__main__': parser = ArgumentParser()", "= ArgumentParser() parser.add_argument(\"-trd\", \"--train_data\", dest=\"train_data_path\", help=\"path to training data file\",", "target_output_file.write(target_seq + '\\n') total_examples += 1 #print(source_seq, target_seq) print('File: '", "in modifiers] def subtokenize_tree(s): return ' '.join([sub for sub in", "as file: subtokenize_line = partial(process_line, target_type, max_targets, max_nodes) with mp.Pool(64)", "modifiers = ['public', 'private', 'protected', 'static'] class TargetType(Enum): seq =", "TREE_SPLIT = re.compile(r'([(),])') def split_subtokens(str): return [subtok for subtok in" ]
[ "stable and efficient computation of multinomial coefficients comes from <NAME>.", "C code from <NAME>. Cool-lex order is similar to colexicographical", "tools to permute multisets without loops or hash tables and", "also generates the integer partitions of a positive, non-zero integer", "Discrete Algorithms, New York, United States. The permutation code is", "for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack", "York, United States. The permutation code is distributed without restrictions.", "url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool\" version('0.1-10', sha256='5bb0cb0d9eb64420c862877247a79bb0afadacfe23262ec8c3fa26e5e34d6ff9') version('0.1-9', sha256='bdf92571cef1b649952d155395a92b8683099ee13114f73a9d41fc5d7d49d329')", "The C++ code for this is based on Python code", "code from <NAME> which can be found here <https://jeromekelleher.net/tag/integer-partitions.html>. The", "which can be found here <https://jeromekelleher.net/tag/integer-partitions.html>. The C++ code and", "generates the integer partitions of a positive, non-zero integer n.", "<NAME>. (2009) <DOI:10.1145/1496770.1496877> Loopless Generation of Multiset Permutations by Prefix", "<NAME>. Cool-lex order is similar to colexicographical order. The algorithm", "Python code are distributed without conditions.\"\"\" homepage = \"https://cloud.r-project.org/package=multicool\" url", "MIT) from spack import * class RMulticool(RPackage): \"\"\"Permutations of multisets", "by Prefix Shifts. Symposium on Discrete Algorithms, New York, United", "code and Python code are distributed without conditions.\"\"\" homepage =", "is described in <NAME>. (2009) <DOI:10.1145/1496770.1496877> Loopless Generation of Multiset", "partitions. The permutation functions are based on C code from", "of a positive, non-zero integer n. The C++ code for", "from <NAME>. 
The code can be download from <http://tamivox.org/dave/multinomial/code.html> and", "described in <NAME>. (2009) <DOI:10.1145/1496770.1496877> Loopless Generation of Multiset Permutations", "is based on Python code from <NAME> which can be", "functions are based on C code from <NAME>. Cool-lex order", "Lawrence Livermore National Security, LLC and other # Spack Project", "SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RMulticool(RPackage):", "without conditions. The package also generates the integer partitions of", "algorithm is described in <NAME>. (2009) <DOI:10.1145/1496770.1496877> Loopless Generation of", "similar to colexicographical order. The algorithm is described in <NAME>.", "can be found here <https://jeromekelleher.net/tag/integer-partitions.html>. The C++ code and Python", "The permutation code is distributed without restrictions. The code for", "= \"https://cloud.r-project.org/package=multicool\" url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool\" version('0.1-10', sha256='5bb0cb0d9eb64420c862877247a79bb0afadacfe23262ec8c3fa26e5e34d6ff9')", "* class RMulticool(RPackage): \"\"\"Permutations of multisets in cool-lex order A", "<NAME>. The code can be download from <http://tamivox.org/dave/multinomial/code.html> and is", "The code can be download from <http://tamivox.org/dave/multinomial/code.html> and is distributed", "permutation code is distributed without restrictions. The code for stable", "RMulticool(RPackage): \"\"\"Permutations of multisets in cool-lex order A set of", "be found here <https://jeromekelleher.net/tag/integer-partitions.html>. 
The C++ code and Python code", "are distributed without conditions.\"\"\" homepage = \"https://cloud.r-project.org/package=multicool\" url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\"", "homepage = \"https://cloud.r-project.org/package=multicool\" url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool\" version('0.1-10',", "coefficients comes from <NAME>. The code can be download from", "code for stable and efficient computation of multinomial coefficients comes", "is distributed without conditions. The package also generates the integer", "a positive, non-zero integer n. The C++ code for this", "and to generate integer partitions. The permutation functions are based", "Spack Project Developers. See the top-level COPYRIGHT file for details.", "2013-2022 Lawrence Livermore National Security, LLC and other # Spack", "Loopless Generation of Multiset Permutations by Prefix Shifts. Symposium on", "National Security, LLC and other # Spack Project Developers. See", "and Python code are distributed without conditions.\"\"\" homepage = \"https://cloud.r-project.org/package=multicool\"", "without restrictions. The code for stable and efficient computation of", "distributed without restrictions. The code for stable and efficient computation", "generate integer partitions. The permutation functions are based on C", "Security, LLC and other # Spack Project Developers. See the", "cool-lex order A set of tools to permute multisets without", "is similar to colexicographical order. The algorithm is described in", "integer partitions of a positive, non-zero integer n. The C++", "C++ code and Python code are distributed without conditions.\"\"\" homepage", "permute multisets without loops or hash tables and to generate", "to colexicographical order. The algorithm is described in <NAME>. 
(2009)", "Symposium on Discrete Algorithms, New York, United States. The permutation", "New York, United States. The permutation code is distributed without", "code for this is based on Python code from <NAME>", "order is similar to colexicographical order. The algorithm is described", "conditions.\"\"\" homepage = \"https://cloud.r-project.org/package=multicool\" url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool\"", "The code for stable and efficient computation of multinomial coefficients", "# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import *", "to permute multisets without loops or hash tables and to", "integer n. The C++ code for this is based on", "A set of tools to permute multisets without loops or", "colexicographical order. The algorithm is described in <NAME>. (2009) <DOI:10.1145/1496770.1496877>", "the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0", "in <NAME>. (2009) <DOI:10.1145/1496770.1496877> Loopless Generation of Multiset Permutations by", "computation of multinomial coefficients comes from <NAME>. The code can", "<DOI:10.1145/1496770.1496877> Loopless Generation of Multiset Permutations by Prefix Shifts. Symposium", "for stable and efficient computation of multinomial coefficients comes from", "file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from", "# Spack Project Developers. See the top-level COPYRIGHT file for", "download from <http://tamivox.org/dave/multinomial/code.html> and is distributed without conditions. The package", "distributed without conditions. The package also generates the integer partitions", "for this is based on Python code from <NAME> which", "<NAME> which can be found here <https://jeromekelleher.net/tag/integer-partitions.html>. The C++ code", "States. The permutation code is distributed without restrictions. 
The code", "Python code from <NAME> which can be found here <https://jeromekelleher.net/tag/integer-partitions.html>.", "code are distributed without conditions.\"\"\" homepage = \"https://cloud.r-project.org/package=multicool\" url =", "United States. The permutation code is distributed without restrictions. The", "Livermore National Security, LLC and other # Spack Project Developers.", "code is distributed without restrictions. The code for stable and", "based on C code from <NAME>. Cool-lex order is similar", "The algorithm is described in <NAME>. (2009) <DOI:10.1145/1496770.1496877> Loopless Generation", "multinomial coefficients comes from <NAME>. The code can be download", "<https://jeromekelleher.net/tag/integer-partitions.html>. The C++ code and Python code are distributed without", "\"https://cloud.r-project.org/package=multicool\" url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool\" version('0.1-10', sha256='5bb0cb0d9eb64420c862877247a79bb0afadacfe23262ec8c3fa26e5e34d6ff9') version('0.1-9',", "from spack import * class RMulticool(RPackage): \"\"\"Permutations of multisets in", "are based on C code from <NAME>. Cool-lex order is", "comes from <NAME>. The code can be download from <http://tamivox.org/dave/multinomial/code.html>", "from <http://tamivox.org/dave/multinomial/code.html> and is distributed without conditions. The package also", "conditions. The package also generates the integer partitions of a", "partitions of a positive, non-zero integer n. The C++ code", "efficient computation of multinomial coefficients comes from <NAME>. The code", "set of tools to permute multisets without loops or hash", "on C code from <NAME>. Cool-lex order is similar to", "permutation functions are based on C code from <NAME>. 
Cool-lex", "of multisets in cool-lex order A set of tools to", "loops or hash tables and to generate integer partitions. The", "order. The algorithm is described in <NAME>. (2009) <DOI:10.1145/1496770.1496877> Loopless", "hash tables and to generate integer partitions. The permutation functions", "LLC and other # Spack Project Developers. See the top-level", "details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import", "<http://tamivox.org/dave/multinomial/code.html> and is distributed without conditions. The package also generates", "multisets in cool-lex order A set of tools to permute", "in cool-lex order A set of tools to permute multisets", "this is based on Python code from <NAME> which can", "Prefix Shifts. Symposium on Discrete Algorithms, New York, United States.", "found here <https://jeromekelleher.net/tag/integer-partitions.html>. The C++ code and Python code are", "and efficient computation of multinomial coefficients comes from <NAME>. The", "on Python code from <NAME> which can be found here", "OR MIT) from spack import * class RMulticool(RPackage): \"\"\"Permutations of", "of multinomial coefficients comes from <NAME>. The code can be", "from <NAME>. Cool-lex order is similar to colexicographical order. The", "to generate integer partitions. The permutation functions are based on", "Generation of Multiset Permutations by Prefix Shifts. Symposium on Discrete", "positive, non-zero integer n. The C++ code for this is", "Developers. See the top-level COPYRIGHT file for details. # #", "# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other", "of tools to permute multisets without loops or hash tables", "restrictions. The code for stable and efficient computation of multinomial", "C++ code for this is based on Python code from", "code can be download from <http://tamivox.org/dave/multinomial/code.html> and is distributed without", "multisets without loops or hash tables and to generate integer", "Shifts. 
Symposium on Discrete Algorithms, New York, United States. The", "The C++ code and Python code are distributed without conditions.\"\"\"", "without loops or hash tables and to generate integer partitions.", "be download from <http://tamivox.org/dave/multinomial/code.html> and is distributed without conditions. The", "class RMulticool(RPackage): \"\"\"Permutations of multisets in cool-lex order A set", "tables and to generate integer partitions. The permutation functions are", "import * class RMulticool(RPackage): \"\"\"Permutations of multisets in cool-lex order", "# SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class", "\"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool\" version('0.1-10', sha256='5bb0cb0d9eb64420c862877247a79bb0afadacfe23262ec8c3fa26e5e34d6ff9') version('0.1-9', sha256='bdf92571cef1b649952d155395a92b8683099ee13114f73a9d41fc5d7d49d329') depends_on('r-rcpp@0.11.2:', type=('build',", "on Discrete Algorithms, New York, United States. 
The permutation code", "list_url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool\" version('0.1-10', sha256='5bb0cb0d9eb64420c862877247a79bb0afadacfe23262ec8c3fa26e5e34d6ff9') version('0.1-9', sha256='bdf92571cef1b649952d155395a92b8683099ee13114f73a9d41fc5d7d49d329') depends_on('r-rcpp@0.11.2:', type=('build', 'run'))", "= \"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool\" version('0.1-10', sha256='5bb0cb0d9eb64420c862877247a79bb0afadacfe23262ec8c3fa26e5e34d6ff9') version('0.1-9', sha256='bdf92571cef1b649952d155395a92b8683099ee13114f73a9d41fc5d7d49d329') depends_on('r-rcpp@0.11.2:',", "(Apache-2.0 OR MIT) from spack import * class RMulticool(RPackage): \"\"\"Permutations", "Copyright 2013-2022 Lawrence Livermore National Security, LLC and other #", "the integer partitions of a positive, non-zero integer n. The", "The package also generates the integer partitions of a positive,", "Cool-lex order is similar to colexicographical order. The algorithm is", "<gh_stars>0 # Copyright 2013-2022 Lawrence Livermore National Security, LLC and", "integer partitions. The permutation functions are based on C code", "based on Python code from <NAME> which can be found", "Algorithms, New York, United States. The permutation code is distributed", "spack import * class RMulticool(RPackage): \"\"\"Permutations of multisets in cool-lex", "from <NAME> which can be found here <https://jeromekelleher.net/tag/integer-partitions.html>. The C++", "without conditions.\"\"\" homepage = \"https://cloud.r-project.org/package=multicool\" url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\" list_url =", "See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier:", "and other # Spack Project Developers. See the top-level COPYRIGHT", "and is distributed without conditions. 
The package also generates the", "non-zero integer n. The C++ code for this is based", "(2009) <DOI:10.1145/1496770.1496877> Loopless Generation of Multiset Permutations by Prefix Shifts.", "or hash tables and to generate integer partitions. The permutation", "distributed without conditions.\"\"\" homepage = \"https://cloud.r-project.org/package=multicool\" url = \"https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz\" list_url", "code from <NAME>. Cool-lex order is similar to colexicographical order.", "The permutation functions are based on C code from <NAME>.", "can be download from <http://tamivox.org/dave/multinomial/code.html> and is distributed without conditions.", "is distributed without restrictions. The code for stable and efficient", "Project Developers. See the top-level COPYRIGHT file for details. #", "order A set of tools to permute multisets without loops", "Multiset Permutations by Prefix Shifts. Symposium on Discrete Algorithms, New", "package also generates the integer partitions of a positive, non-zero", "here <https://jeromekelleher.net/tag/integer-partitions.html>. The C++ code and Python code are distributed", "other # Spack Project Developers. See the top-level COPYRIGHT file", "\"\"\"Permutations of multisets in cool-lex order A set of tools", "of Multiset Permutations by Prefix Shifts. Symposium on Discrete Algorithms,", "COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT)", "n. The C++ code for this is based on Python", "top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR", "Permutations by Prefix Shifts. Symposium on Discrete Algorithms, New York," ]
[ "if 1 < string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:',", "related and neighboring rights to this software to the public", "for match in matches: if match not in irc_strings: irc_strings.append(match)", "content) for match in matches: if match not in help_strings:", "ignore any that don't exist ... print(\"nickserv help strings:\", len(help_strings))", "2018 by <NAME> <<EMAIL>> # # To the extent possible", "manually searches the source code, extracts strings and then updates", "strings:\", len(irc_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w') as f: f.write(json.dumps({k:k", "worldwide. This software is distributed without any warranty. # #", "for k in irc_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n')", "files. # Written in 2018 by <NAME> <<EMAIL>> # #", "try: irc_strings.remove(s) except ValueError: # ignore any that don't exist", "chanserv help entries help_strings = [] for subdir, dirs, files", "entries help_strings = [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']):", "irc_strings: irc_strings.append(match) matches = re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)', content) for match in matches:", "software to the public domain # worldwide. This software is", "with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k", "law, the author(s) have dedicated all copyright # and related", "copyright # and related and neighboring rights to this software", "for s in ignored_strings: try: help_strings.remove(s) except ValueError: # ignore", "ValueError: # ignore any that don't exist ... print(\"nickserv help", "matches: if match not in help_strings: help_strings.append(match) for s in", "re.findall(r'\\`([^\\`]+)\\`', content) for match in matches: if '\\n' in match", "except ValueError: # ignore any that don't exist ... 
print(\"help", "script updates our translation file with the newest, coolest strings", "string.count('%f'): print(' confirm:', string) # help entries help_strings = []", "directory.\"\"\" import os import re import json from docopt import", "help entries help_strings = [] for subdir, dirs, files in", "in match and match not in help_strings: help_strings.append(match) for s", "< string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string.split('\\n')[0]) #", "# general IRC strings irc_strings = [] for subdir, dirs,", "match not in irc_strings: irc_strings.append(match) for s in ignored_strings: try:", "'w') as f: f.write(json.dumps({k:k for k in irc_strings}, sort_keys=True, indent=2,", "--version updatetranslations.py (-h | --help) Options: <irc-dir> Oragono's irc subdirectory", "(-h | --help) Options: <irc-dir> Oragono's irc subdirectory where the", "is kept. <languages-dir> Languages directory.\"\"\" import os import re import", "If not, see # <http://creativecommons.org/publicdomain/zero/1.0/>. \"\"\"updatetranslations.py Usage: updatetranslations.py run <irc-dir>", "open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`', content) for match in", "strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'), 'w') as f: f.write(json.dumps({k:k", "subdir + os.sep + fname if fname == 'nickserv.go': content", "== 'help.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`',", "+ fname if fname == 'hostserv.go': content = open(filepath, 'r',", "confirm:', string) # hostserv help entries help_strings = [] for", "import yaml ignored_strings = [ 'none', 'saset' ] if __name__", "ignored_strings = [ 'none', 'saset' ] if __name__ == '__main__':", "exist ... print(\"chanserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'),", "that don't exist ... 
print(\"chanserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'],", "fname if filepath.endswith('.go'): content = open(filepath, 'r', encoding='UTF-8').read() matches =", "os.sep + fname if fname == 'help.go': content = open(filepath,", "'hostserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`', content)", "files: filepath = subdir + os.sep + fname if fname", "ValueError: # ignore any that don't exist ... print(\"irc strings:\",", "': '))) f.write('\\n') for string in help_strings: if 1 <", "'none', 'saset' ] if __name__ == '__main__': arguments = docopt(__doc__,", "have dedicated all copyright # and related and neighboring rights", "'example', 'nickserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings},", "'nickserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True,", "not in irc_strings: irc_strings.append(match) for s in ignored_strings: try: irc_strings.remove(s)", "# hostserv help entries help_strings = [] for subdir, dirs,", "indent=2, separators=(',', ': '))) f.write('\\n') for string in help_strings: if", "dedicated all copyright # and related and neighboring rights to", "open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in", "# updatetranslations.py # # tl;dr this script updates our translation", "if __name__ == '__main__': arguments = docopt(__doc__, version=\"0.1.0\") if arguments['run']:", "in help_strings: help_strings.append(match) for s in ignored_strings: try: help_strings.remove(s) except", "as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',',", "irc_strings: irc_strings.append(match) for s in ignored_strings: try: irc_strings.remove(s) except ValueError:", "--help) Options: <irc-dir> Oragono's irc subdirectory where the Go code", "copy of the CC0 Public Domain Dedication along # with", 
"not in irc_strings: irc_strings.append(match) matches = re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)', content) for match", "fname if fname == 'chanserv.go': content = open(filepath, 'r', encoding='UTF-8').read()", "subdirectory where the Go code is kept. <languages-dir> Languages directory.\"\"\"", "content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)', content) for", "except ValueError: # ignore any that don't exist ... print(\"nickserv", "fname == 'chanserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches =", "open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in", "string) # hostserv help entries help_strings = [] for subdir,", "subdir + os.sep + fname if fname == 'chanserv.go': content", "subdir + os.sep + fname if fname == 'hostserv.go': content", "... print(\"help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w') as", "f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': ')))", "with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k", "our translation file with the newest, coolest strings we've added!", "irc subdirectory where the Go code is kept. <languages-dir> Languages", "exist ... print(\"irc strings:\", len(irc_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w')", "print(' confirm:', string.split('\\n')[0]) # nickserv help entries help_strings = []", "'example', 'irc.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in irc_strings},", "sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n') for string in help_strings:", "we've added! # it manually searches the source code, extracts", "the newest, coolest strings we've added! 
# it manually searches", "= docopt(__doc__, version=\"0.1.0\") if arguments['run']: # general IRC strings irc_strings", "# You should have received a copy of the CC0", "it manually searches the source code, extracts strings and then", "docopt(__doc__, version=\"0.1.0\") if arguments['run']: # general IRC strings irc_strings =", "string.count('%d') + string.count('%f'): print(' confirm:', string) # help entries help_strings", "strings we've added! # it manually searches the source code,", "the author(s) have dedicated all copyright # and related and", "that don't exist ... print(\"nickserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'],", "matches: if '\\n' in match and match not in help_strings:", "and neighboring rights to this software to the public domain", "that don't exist ... print(\"help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example',", "in os.walk(arguments['<irc-dir>']): for fname in files: filepath = subdir +", "updatetranslations.py run <irc-dir> <languages-dir> updatetranslations.py --version updatetranslations.py (-h | --help)", "re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)', content) for match in matches: if match not in", "<languages-dir> Languages directory.\"\"\" import os import re import json from", "where the Go code is kept. <languages-dir> Languages directory.\"\"\" import", "match in matches: if '\\n' in match and match not", "fname if fname == 'nickserv.go': content = open(filepath, 'r', encoding='UTF-8').read()", "Go code is kept. <languages-dir> Languages directory.\"\"\" import os import", "+ os.sep + fname if fname == 'nickserv.go': content =", "\"\"\"updatetranslations.py Usage: updatetranslations.py run <irc-dir> <languages-dir> updatetranslations.py --version updatetranslations.py (-h", "then updates the language files. 
# Written in 2018 by", "open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in", "help_strings: if 1 < string.count('%s') + string.count('%d') + string.count('%f'): print('", "code, extracts strings and then updates the language files. #", "if match not in help_strings: help_strings.append(match) for s in ignored_strings:", "in matches: if match not in irc_strings: irc_strings.append(match) for s", "separators=(',', ': '))) f.write('\\n') for string in irc_strings: if 1", "match in matches: if match not in irc_strings: irc_strings.append(match) for", "= subdir + os.sep + fname if fname == 'hostserv.go':", "len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w') as f: f.write(json.dumps({k:k for", "To the extent possible under law, the author(s) have dedicated", "exist ... print(\"hostserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'),", "updatetranslations.py --version updatetranslations.py (-h | --help) Options: <irc-dir> Oragono's irc", "re import json from docopt import docopt import yaml ignored_strings", "ignore any that don't exist ... 
print(\"hostserv help strings:\", len(help_strings))", "with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w') as f: f.write(json.dumps({k:k for k", "for match in matches: if match not in help_strings: help_strings.append(match)", "[ 'none', 'saset' ] if __name__ == '__main__': arguments =", "strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w') as f: f.write(json.dumps({k:k", "the CC0 Public Domain Dedication along # with this software.", "of the CC0 Public Domain Dedication along # with this", "if fname == 'help.go': content = open(filepath, 'r', encoding='UTF-8').read() matches", "matches = re.findall(r'\\`([^\\`]+)\\`', content) for match in matches: if '\\n'", "python3 # updatetranslations.py # # tl;dr this script updates our", "file with the newest, coolest strings we've added! # it", "# Written in 2018 by <NAME> <<EMAIL>> # # To", "see # <http://creativecommons.org/publicdomain/zero/1.0/>. \"\"\"updatetranslations.py Usage: updatetranslations.py run <irc-dir> <languages-dir> updatetranslations.py", "have received a copy of the CC0 Public Domain Dedication", "files in os.walk(arguments['<irc-dir>']): for fname in files: filepath = subdir", "+ string.count('%f'): print(' confirm:', string) # hostserv help entries help_strings", "import os import re import json from docopt import docopt", "= open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)', content) for match", "if '\\n' in match and match not in help_strings: help_strings.append(match)", "any that don't exist ... 
print(\"chanserv help strings:\", len(help_strings)) with", "+ os.sep + fname if fname == 'hostserv.go': content =", "confirm:', string.split('\\n')[0]) # nickserv help entries help_strings = [] for", "help_strings.remove(s) except ValueError: # ignore any that don't exist ...", "all copyright # and related and neighboring rights to this", "CC0 Public Domain Dedication along # with this software. If", "if match not in irc_strings: irc_strings.append(match) for s in ignored_strings:", "string.count('%f'): print(' confirm:', string.split('\\n')[0]) # nickserv help entries help_strings =", "the extent possible under law, the author(s) have dedicated all", "help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'), 'w') as f:", "match not in help_strings: help_strings.append(match) for s in ignored_strings: try:", "not, see # <http://creativecommons.org/publicdomain/zero/1.0/>. \"\"\"updatetranslations.py Usage: updatetranslations.py run <irc-dir> <languages-dir>", "] if __name__ == '__main__': arguments = docopt(__doc__, version=\"0.1.0\") if", "in matches: if '\\n' in match and match not in", "print(' confirm:', string) # help entries help_strings = [] for", "this software to the public domain # worldwide. This software", "and related and neighboring rights to this software to the", "+ string.count('%d') + string.count('%f'): print(' confirm:', string) # chanserv help", "tl;dr this script updates our translation file with the newest,", "'))) f.write('\\n') for string in irc_strings: if 1 < string.count('%s')", "= re.findall(r'\\`([^\\`]+)\\`', content) for match in matches: if '\\n' in", "+ os.sep + fname if fname == 'help.go': content =", "# <http://creativecommons.org/publicdomain/zero/1.0/>. \"\"\"updatetranslations.py Usage: updatetranslations.py run <irc-dir> <languages-dir> updatetranslations.py --version", "ValueError: # ignore any that don't exist ... 
print(\"chanserv help", "as f: f.write(json.dumps({k:k for k in irc_strings}, sort_keys=True, indent=2, separators=(',',", "updatetranslations.py (-h | --help) Options: <irc-dir> Oragono's irc subdirectory where", "newest, coolest strings we've added! # it manually searches the", "# ignore any that don't exist ... print(\"nickserv help strings:\",", "the public domain # worldwide. This software is distributed without", "along # with this software. If not, see # <http://creativecommons.org/publicdomain/zero/1.0/>.", "confirm:', string) # help entries help_strings = [] for subdir,", "sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n') for string in irc_strings:", "Usage: updatetranslations.py run <irc-dir> <languages-dir> updatetranslations.py --version updatetranslations.py (-h |", "Public Domain Dedication along # with this software. If not,", "ignored_strings: try: irc_strings.remove(s) except ValueError: # ignore any that don't", "under law, the author(s) have dedicated all copyright # and", "#!/usr/bin/env python3 # updatetranslations.py # # tl;dr this script updates", "don't exist ... 
print(\"chanserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example',", "+ string.count('%d') + string.count('%f'): print(' confirm:', string) # help entries", "string.count('%f'): print(' confirm:', string) # hostserv help entries help_strings =", "filepath = subdir + os.sep + fname if filepath.endswith('.go'): content", "extent possible under law, the author(s) have dedicated all copyright", "+ fname if fname == 'nickserv.go': content = open(filepath, 'r',", "updates our translation file with the newest, coolest strings we've", "in 2018 by <NAME> <<EMAIL>> # # To the extent", "= open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`', content) for match", "help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w') as f:", "that don't exist ... print(\"hostserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'],", "content) for match in matches: if '\\n' in match and", "== 'chanserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`',", "from docopt import docopt import yaml ignored_strings = [ 'none',", "k in help_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n') for", "print(' confirm:', string) # chanserv help entries help_strings = []", "dirs, files in os.walk(arguments['<irc-dir>']): for fname in files: filepath =", "for s in ignored_strings: try: irc_strings.remove(s) except ValueError: # ignore", "+ fname if fname == 'chanserv.go': content = open(filepath, 'r',", "in ignored_strings: try: irc_strings.remove(s) except ValueError: # ignore any that", "ignore any that don't exist ... 
print(\"chanserv help strings:\", len(help_strings))", "+ fname if filepath.endswith('.go'): content = open(filepath, 'r', encoding='UTF-8').read() matches", "rights to this software to the public domain # worldwide.", "strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w') as f: f.write(json.dumps({k:k", "added! # it manually searches the source code, extracts strings", "# ignore any that don't exist ... print(\"chanserv help strings:\",", "Languages directory.\"\"\" import os import re import json from docopt", "don't exist ... print(\"hostserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example',", "matches: if match not in irc_strings: irc_strings.append(match) for s in", "string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string) # help", "filepath = subdir + os.sep + fname if fname ==", "fname if fname == 'help.go': content = open(filepath, 'r', encoding='UTF-8').read()", "for fname in files: filepath = subdir + os.sep +", "# chanserv help entries help_strings = [] for subdir, dirs,", "arguments['run']: # general IRC strings irc_strings = [] for subdir,", "= subdir + os.sep + fname if filepath.endswith('.go'): content =", "separators=(',', ': '))) f.write('\\n') for string in help_strings: if 1", "string.count('%d') + string.count('%f'): print(' confirm:', string.split('\\n')[0]) # nickserv help entries", "'r', encoding='UTF-8').read() matches = re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)', content) for match in matches:", "... print(\"irc strings:\", len(irc_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w') as", "if fname == 'nickserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches", "except ValueError: # ignore any that don't exist ... 
print(\"chanserv", "import docopt import yaml ignored_strings = [ 'none', 'saset' ]", "'hostserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True,", "# worldwide. This software is distributed without any warranty. #", "any that don't exist ... print(\"help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'],", "content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`', content) for", "= subdir + os.sep + fname if fname == 'help.go':", "'chanserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`', content)", "strings irc_strings = [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']):", "encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`', content) for match in matches: if", "matches = re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)', content) for match in matches: if match", "len(irc_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w') as f: f.write(json.dumps({k:k for", "<irc-dir> <languages-dir> updatetranslations.py --version updatetranslations.py (-h | --help) Options: <irc-dir>", "coolest strings we've added! # it manually searches the source", "string.split('\\n')[0]) # nickserv help entries help_strings = [] for subdir,", "help_strings = [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for", "and then updates the language files. 
# Written in 2018", "possible under law, the author(s) have dedicated all copyright #", "len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for", "print(\"nickserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'), 'w') as", "if filepath.endswith('.go'): content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)',", "print(\"help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w') as f:", "and match not in help_strings: help_strings.append(match) for s in ignored_strings:", "in help_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n') for string", "re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)', content) for match in matches: if match not in", "... print(\"chanserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w')", "matches = re.findall(r'\\`([^\\`]+)\\`', content) for match in matches: if match", "if match not in irc_strings: irc_strings.append(match) matches = re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)', content)", "string.count('%d') + string.count('%f'): print(' confirm:', string) # hostserv help entries", "+ os.sep + fname if fname == 'chanserv.go': content =", "is distributed without any warranty. # # You should have", "for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for fname in files:", "+ string.count('%f'): print(' confirm:', string.split('\\n')[0]) # nickserv help entries help_strings", "== 'nickserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`',", "... 
print(\"hostserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w')", "in irc_strings: irc_strings.append(match) matches = re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)', content) for match in", "Oragono's irc subdirectory where the Go code is kept. <languages-dir>", "+ fname if fname == 'help.go': content = open(filepath, 'r',", "if fname == 'chanserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches", "ignore any that don't exist ... print(\"irc strings:\", len(irc_strings)) with", "in irc_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n') for string", "1 < string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string)", "public domain # worldwide. This software is distributed without any", "extracts strings and then updates the language files. # Written", "'saset' ] if __name__ == '__main__': arguments = docopt(__doc__, version=\"0.1.0\")", "to the public domain # worldwide. This software is distributed", "<http://creativecommons.org/publicdomain/zero/1.0/>. \"\"\"updatetranslations.py Usage: updatetranslations.py run <irc-dir> <languages-dir> updatetranslations.py --version updatetranslations.py", "'chanserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True,", "in files: filepath = subdir + os.sep + fname if", "os.walk(arguments['<irc-dir>']): for fname in files: filepath = subdir + os.sep", "with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w') as f: f.write(json.dumps({k:k for k", "neighboring rights to this software to the public domain #", "print(\"hostserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w') as", "hostserv help entries help_strings = [] for subdir, dirs, files", "don't exist ... 
print(\"irc strings:\", len(irc_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'),", "s in ignored_strings: try: help_strings.remove(s) except ValueError: # ignore any", "domain # worldwide. This software is distributed without any warranty.", "matches: if match not in irc_strings: irc_strings.append(match) matches = re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)',", "# it manually searches the source code, extracts strings and", "'\\n' in match and match not in help_strings: help_strings.append(match) for", "files: filepath = subdir + os.sep + fname if filepath.endswith('.go'):", "Options: <irc-dir> Oragono's irc subdirectory where the Go code is", "that don't exist ... print(\"irc strings:\", len(irc_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example',", "strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w') as f: f.write(json.dumps({k:k", "without any warranty. # # You should have received a", "except ValueError: # ignore any that don't exist ... print(\"hostserv", "language files. # Written in 2018 by <NAME> <<EMAIL>> #", "docopt import yaml ignored_strings = [ 'none', 'saset' ] if", "if arguments['run']: # general IRC strings irc_strings = [] for", "in matches: if match not in irc_strings: irc_strings.append(match) matches =", "+ string.count('%f'): print(' confirm:', string) # chanserv help entries help_strings", "'__main__': arguments = docopt(__doc__, version=\"0.1.0\") if arguments['run']: # general IRC", "'help.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`', content)", "# with this software. If not, see # <http://creativecommons.org/publicdomain/zero/1.0/>. \"\"\"updatetranslations.py", "exist ... 
print(\"nickserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'),", "': '))) f.write('\\n') for string in irc_strings: if 1 <", "print(' confirm:', string) # hostserv help entries help_strings = []", "= re.findall(r'\\`([^\\`]+)\\`', content) for match in matches: if match not", "open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)', content) for match in", "strings and then updates the language files. # Written in", "... print(\"nickserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'), 'w')", "ignored_strings: try: help_strings.remove(s) except ValueError: # ignore any that don't", "This software is distributed without any warranty. # # You", "k in irc_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n') for", "exist ... print(\"help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w')", "try: help_strings.remove(s) except ValueError: # ignore any that don't exist", "fname if fname == 'hostserv.go': content = open(filepath, 'r', encoding='UTF-8').read()", "fname in files: filepath = subdir + os.sep + fname", "= re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)', content) for match in matches: if match not", "run <irc-dir> <languages-dir> updatetranslations.py --version updatetranslations.py (-h | --help) Options:", "searches the source code, extracts strings and then updates the", "version=\"0.1.0\") if arguments['run']: # general IRC strings irc_strings = []", "author(s) have dedicated all copyright # and related and neighboring", "string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string) # chanserv", "f: f.write(json.dumps({k:k for k in irc_strings}, sort_keys=True, indent=2, separators=(',', ':", "# # tl;dr this script updates our translation file 
with", "arguments = docopt(__doc__, version=\"0.1.0\") if arguments['run']: # general IRC strings", "# ignore any that don't exist ... print(\"help strings:\", len(help_strings))", "fname == 'hostserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches =", "software is distributed without any warranty. # # You should", "f.write('\\n') for string in help_strings: if 1 < string.count('%s') +", "irc_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n') for string in", "re.findall(r'\\`([^\\`]+)\\`', content) for match in matches: if match not in", "indent=2, separators=(',', ': '))) f.write('\\n') for string in irc_strings: if", "with the newest, coolest strings we've added! # it manually", "+ os.sep + fname if filepath.endswith('.go'): content = open(filepath, 'r',", "any that don't exist ... print(\"irc strings:\", len(irc_strings)) with open(os.path.join(arguments['<languages-dir>'],", "'help.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True,", "help_strings.append(match) for s in ignored_strings: try: help_strings.remove(s) except ValueError: #", "<languages-dir> updatetranslations.py --version updatetranslations.py (-h | --help) Options: <irc-dir> Oragono's", "ValueError: # ignore any that don't exist ... print(\"help strings:\",", "the source code, extracts strings and then updates the language", "encoding='UTF-8').read() matches = re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)', content) for match in matches: if", "| --help) Options: <irc-dir> Oragono's irc subdirectory where the Go", "software. If not, see # <http://creativecommons.org/publicdomain/zero/1.0/>. 
\"\"\"updatetranslations.py Usage: updatetranslations.py run", "irc_strings.append(match) for s in ignored_strings: try: irc_strings.remove(s) except ValueError: #", "if fname == 'hostserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches", "f.write(json.dumps({k:k for k in irc_strings}, sort_keys=True, indent=2, separators=(',', ': ')))", "updatetranslations.py # # tl;dr this script updates our translation file", "not in help_strings: help_strings.append(match) for s in ignored_strings: try: help_strings.remove(s)", "match not in irc_strings: irc_strings.append(match) matches = re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)', content) for", "match in matches: if match not in irc_strings: irc_strings.append(match) matches", "general IRC strings irc_strings = [] for subdir, dirs, files", "print(\"chanserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w') as", "don't exist ... print(\"help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'),", "irc_strings.remove(s) except ValueError: # ignore any that don't exist ...", "don't exist ... print(\"nickserv help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example',", "<NAME> <<EMAIL>> # # To the extent possible under law,", "= subdir + os.sep + fname if fname == 'chanserv.go':", "the Go code is kept. 
<languages-dir> Languages directory.\"\"\" import os", "os import re import json from docopt import docopt import", "os.sep + fname if filepath.endswith('.go'): content = open(filepath, 'r', encoding='UTF-8').read()", "# tl;dr this script updates our translation file with the", "irc_strings = [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for", "s in ignored_strings: try: irc_strings.remove(s) except ValueError: # ignore any", "'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`', content) for match in matches:", "with this software. If not, see # <http://creativecommons.org/publicdomain/zero/1.0/>. \"\"\"updatetranslations.py Usage:", "= re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)', content) for match in matches: if match not", "== 'hostserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`',", "for match in matches: if '\\n' in match and match", "match in matches: if match not in help_strings: help_strings.append(match) for", "__name__ == '__main__': arguments = docopt(__doc__, version=\"0.1.0\") if arguments['run']: #", "'example', 'hostserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings},", "Dedication along # with this software. If not, see #", "for string in irc_strings: if 1 < string.count('%s') + string.count('%d')", "string) # help entries help_strings = [] for subdir, dirs,", "os.sep + fname if fname == 'hostserv.go': content = open(filepath,", "translation file with the newest, coolest strings we've added! 
#", "subdir + os.sep + fname if fname == 'help.go': content", "should have received a copy of the CC0 Public Domain", "[] for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for fname in", "in irc_strings: if 1 < string.count('%s') + string.count('%d') + string.count('%f'):", "< string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string) #", "<reponame>erincerys/ergo<gh_stars>1000+ #!/usr/bin/env python3 # updatetranslations.py # # tl;dr this script", "irc_strings.append(match) matches = re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)', content) for match in matches: if", "You should have received a copy of the CC0 Public", "for string in help_strings: if 1 < string.count('%s') + string.count('%d')", "len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for", "source code, extracts strings and then updates the language files.", "confirm:', string) # chanserv help entries help_strings = [] for", "<<EMAIL>> # # To the extent possible under law, the", "len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for", "IRC strings irc_strings = [] for subdir, dirs, files in", "== '__main__': arguments = docopt(__doc__, version=\"0.1.0\") if arguments['run']: # general", "string) # chanserv help entries help_strings = [] for subdir,", "subdir + os.sep + fname if filepath.endswith('.go'): content = open(filepath,", "nickserv help entries help_strings = [] for subdir, dirs, files", "+ string.count('%d') + string.count('%f'): print(' confirm:', string.split('\\n')[0]) # nickserv help", "string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string) # hostserv", "print(\"irc strings:\", len(irc_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w') as f:", "with open(os.path.join(arguments['<languages-dir>'], 
'example', 'nickserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k", "= [ 'none', 'saset' ] if __name__ == '__main__': arguments", "received a copy of the CC0 Public Domain Dedication along", "irc_strings: if 1 < string.count('%s') + string.count('%d') + string.count('%f'): print('", "for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n')", "match and match not in help_strings: help_strings.append(match) for s in", "help_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\\n') for string in", "in ignored_strings: try: help_strings.remove(s) except ValueError: # ignore any that", "string.count('%d') + string.count('%f'): print(' confirm:', string) # chanserv help entries", "by <NAME> <<EMAIL>> # # To the extent possible under", "'example', 'chanserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings},", "+ string.count('%f'): print(' confirm:', string) # help entries help_strings =", "# and related and neighboring rights to this software to", "os.sep + fname if fname == 'chanserv.go': content = open(filepath,", "json from docopt import docopt import yaml ignored_strings = [", "import re import json from docopt import docopt import yaml", "import json from docopt import docopt import yaml ignored_strings =", "'w') as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2,", "string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string.split('\\n')[0]) # nickserv", "# To the extent possible under law, the author(s) have", "'))) f.write('\\n') for string in help_strings: if 1 < string.count('%s')", "this software. If not, see # <http://creativecommons.org/publicdomain/zero/1.0/>. \"\"\"updatetranslations.py Usage: updatetranslations.py", "matches = re.findall(r'\\.t\\(\\`([^\\`]+)\\`\\)', content) for match in matches: if match", "ignore any that don't exist ... 
print(\"help strings:\", len(help_strings)) with", "string in irc_strings: if 1 < string.count('%s') + string.count('%d') +", "code is kept. <languages-dir> Languages directory.\"\"\" import os import re", "the language files. # Written in 2018 by <NAME> <<EMAIL>>", "'irc.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in irc_strings}, sort_keys=True,", "= subdir + os.sep + fname if fname == 'nickserv.go':", "in matches: if match not in help_strings: help_strings.append(match) for s", "# nickserv help entries help_strings = [] for subdir, dirs,", "this script updates our translation file with the newest, coolest", "open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in", "Written in 2018 by <NAME> <<EMAIL>> # # To the", "subdir, dirs, files in os.walk(arguments['<irc-dir>']): for fname in files: filepath", "in irc_strings: irc_strings.append(match) for s in ignored_strings: try: irc_strings.remove(s) except", "<irc-dir> Oragono's irc subdirectory where the Go code is kept.", "to this software to the public domain # worldwide. This", "= [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for fname", "filepath.endswith('.go'): content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\.t\\(\"((?:[^\"]|\\\\\")+)\"\\)', content)", "help_strings: help_strings.append(match) for s in ignored_strings: try: help_strings.remove(s) except ValueError:", "except ValueError: # ignore any that don't exist ... print(\"irc", "docopt import docopt import yaml ignored_strings = [ 'none', 'saset'", "# help entries help_strings = [] for subdir, dirs, files", "kept. <languages-dir> Languages directory.\"\"\" import os import re import json", "a copy of the CC0 Public Domain Dedication along #", "any warranty. 
# # You should have received a copy", "os.sep + fname if fname == 'nickserv.go': content = open(filepath,", "in help_strings: if 1 < string.count('%s') + string.count('%d') + string.count('%f'):", "warranty. # # You should have received a copy of", "content) for match in matches: if match not in irc_strings:", "f.write('\\n') for string in irc_strings: if 1 < string.count('%s') +", "Domain Dedication along # with this software. If not, see", "ValueError: # ignore any that don't exist ... print(\"hostserv help", "open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in", "string.count('%f'): print(' confirm:', string) # chanserv help entries help_strings =", "'nickserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\\`([^\\`]+)\\`', content)", "1 < string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string.split('\\n')[0])", "# # To the extent possible under law, the author(s)", "distributed without any warranty. # # You should have received", "f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ':", "# ignore any that don't exist ... print(\"hostserv help strings:\",", "+ string.count('%d') + string.count('%f'): print(' confirm:', string) # hostserv help", "any that don't exist ... print(\"hostserv help strings:\", len(help_strings)) with", "'example', 'help.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings},", "string in help_strings: if 1 < string.count('%s') + string.count('%d') +", "fname == 'nickserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches =", "yaml ignored_strings = [ 'none', 'saset' ] if __name__ ==", "updates the language files. # Written in 2018 by <NAME>", "help strings:\", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w') as f:", "any that don't exist ... 
print(\"nickserv help strings:\", len(help_strings)) with", "# # You should have received a copy of the", "fname == 'help.go': content = open(filepath, 'r', encoding='UTF-8').read() matches =", "# ignore any that don't exist ... print(\"irc strings:\", len(irc_strings))" ]
[ "for fname in fnames: f = open(fname) for l in", "<filename>processing_tools/number_of_tenants.py<gh_stars>1-10 import sys from collections import defaultdict def Process (fnames):", "priv = int(parts[1]) pub = int(parts[2]) num_machines = tenants *", "print #print \"%d %d %f\"%(k, runs[k], machines[k]/float(runs[k])) if __name__ ==", "+ oext_time tenant_time[(priv, pub)][tenants] += total tenant_run[(priv, pub)][tenants] += 1", "= defaultdict(lambda: defaultdict(lambda: 0.0)) tenant_run = defaultdict(lambda: defaultdict(lambda:0)) for fname", "= ext_checks * float(parts[4]) oext_check = (tenants * priv) *", "ext_checks = (tenants * priv) * ((tenants - 1) *", "fnames: f = open(fname) for l in f: if l.startswith(\"tenant\"):", "defaultdict(lambda: defaultdict(lambda: 0.0)) tenant_run = defaultdict(lambda: defaultdict(lambda:0)) for fname in", "* float(parts[3]) ext_checks = (tenants * priv) * ((tenants -", "* ((tenants - 1) * pub) ext_time = ext_checks *", "tenant_time[k][k2]/float(tenant_run[k][k2])) print print #print \"%d %d %f\"%(k, runs[k], machines[k]/float(runs[k])) if", "total = int_time + ext_time + oext_time tenant_time[(priv, pub)][tenants] +=", "tenants = int(parts[0]) priv = int(parts[1]) pub = int(parts[2]) num_machines", "(tenants * pub) oext_time = oext_check * float(parts[5]) total =", "int_time + ext_time + oext_time tenant_time[(priv, pub)][tenants] += total tenant_run[(priv,", "pub) ext_time = ext_checks * float(parts[4]) oext_check = (tenants *", "+ ext_time + oext_time tenant_time[(priv, pub)][tenants] += total tenant_run[(priv, pub)][tenants]", "tenants * priv * pub int_checks = (tenants * tenants", "= (tenants * priv) * ((tenants - 1) * pub)", "in f: if l.startswith(\"tenant\"): continue parts = l.strip().split() tenants =", "tenant_run = defaultdict(lambda: defaultdict(lambda:0)) for fname in fnames: f =", "fname in fnames: f = open(fname) for l in f:", "tenant_run[(priv, pub)][tenants] += 1 for k in sorted(tenant_run.keys()): print \"#", 
"continue parts = l.strip().split() tenants = int(parts[0]) priv = int(parts[1])", "%f\"%(k2, tenant_run[k][k2], \\ tenant_time[k][k2]/float(tenant_run[k][k2])) print print #print \"%d %d %f\"%(k,", "print \"# ----%s------\"%(str(k)) for k2 in sorted(tenant_run[k].keys()): print \"%d %d", "* float(parts[5]) total = int_time + ext_time + oext_time tenant_time[(priv,", "k in sorted(tenant_run.keys()): print \"# ----%s------\"%(str(k)) for k2 in sorted(tenant_run[k].keys()):", "#print \"%d %d %f\"%(k, runs[k], machines[k]/float(runs[k])) if __name__ == \"__main__\":", "defaultdict(lambda: 0.0)) tenant_run = defaultdict(lambda: defaultdict(lambda:0)) for fname in fnames:", "float(parts[4]) oext_check = (tenants * priv) * (tenants * pub)", "float(parts[3]) ext_checks = (tenants * priv) * ((tenants - 1)", "+= total tenant_run[(priv, pub)][tenants] += 1 for k in sorted(tenant_run.keys()):", "(priv - 1)) / 2 int_time = int_checks * float(parts[3])", "f: if l.startswith(\"tenant\"): continue parts = l.strip().split() tenants = int(parts[0])", "defaultdict(lambda: defaultdict(lambda:0)) for fname in fnames: f = open(fname) for", "* priv * pub int_checks = (tenants * tenants *", "parts = l.strip().split() tenants = int(parts[0]) priv = int(parts[1]) pub", "import sys from collections import defaultdict def Process (fnames): tenant_time", "oext_check * float(parts[5]) total = int_time + ext_time + oext_time", "((tenants - 1) * pub) ext_time = ext_checks * float(parts[4])", "priv * (priv - 1)) / 2 int_time = int_checks", "l.strip().split() tenants = int(parts[0]) priv = int(parts[1]) pub = int(parts[2])", "priv) * (tenants * pub) oext_time = oext_check * float(parts[5])", "\"%d %d %f\"%(k, runs[k], machines[k]/float(runs[k])) if __name__ == \"__main__\": Process(sys.argv[1:])", "defaultdict(lambda:0)) for fname in fnames: f = open(fname) for l", "float(parts[5]) total = int_time + ext_time + oext_time tenant_time[(priv, pub)][tenants]", "defaultdict def Process (fnames): 
tenant_time = defaultdict(lambda: defaultdict(lambda: 0.0)) tenant_run", "= (tenants * tenants * priv * (priv - 1))", "* pub) ext_time = ext_checks * float(parts[4]) oext_check = (tenants", "(tenants * tenants * priv * (priv - 1)) /", "= defaultdict(lambda: defaultdict(lambda:0)) for fname in fnames: f = open(fname)", "\"%d %d %f\"%(k2, tenant_run[k][k2], \\ tenant_time[k][k2]/float(tenant_run[k][k2])) print print #print \"%d", "(tenants * priv) * (tenants * pub) oext_time = oext_check", "import defaultdict def Process (fnames): tenant_time = defaultdict(lambda: defaultdict(lambda: 0.0))", "* priv) * ((tenants - 1) * pub) ext_time =", "= int_checks * float(parts[3]) ext_checks = (tenants * priv) *", "for l in f: if l.startswith(\"tenant\"): continue parts = l.strip().split()", "= int(parts[1]) pub = int(parts[2]) num_machines = tenants * priv", "* pub int_checks = (tenants * tenants * priv *", "from collections import defaultdict def Process (fnames): tenant_time = defaultdict(lambda:", "pub)][tenants] += total tenant_run[(priv, pub)][tenants] += 1 for k in", "priv) * ((tenants - 1) * pub) ext_time = ext_checks", "1)) / 2 int_time = int_checks * float(parts[3]) ext_checks =", "print print #print \"%d %d %f\"%(k, runs[k], machines[k]/float(runs[k])) if __name__", "= tenants * priv * pub int_checks = (tenants *", "(tenants * priv) * ((tenants - 1) * pub) ext_time", "= int(parts[2]) num_machines = tenants * priv * pub int_checks", "+= 1 for k in sorted(tenant_run.keys()): print \"# ----%s------\"%(str(k)) for", "pub)][tenants] += 1 for k in sorted(tenant_run.keys()): print \"# ----%s------\"%(str(k))", "f = open(fname) for l in f: if l.startswith(\"tenant\"): continue", "l.startswith(\"tenant\"): continue parts = l.strip().split() tenants = int(parts[0]) priv =", "* pub) oext_time = oext_check * float(parts[5]) total = int_time", "Process (fnames): tenant_time = defaultdict(lambda: defaultdict(lambda: 0.0)) tenant_run = defaultdict(lambda:", "= open(fname) for l in f: 
if l.startswith(\"tenant\"): continue parts", "int(parts[0]) priv = int(parts[1]) pub = int(parts[2]) num_machines = tenants", "- 1) * pub) ext_time = ext_checks * float(parts[4]) oext_check", "in fnames: f = open(fname) for l in f: if", "print \"%d %d %f\"%(k2, tenant_run[k][k2], \\ tenant_time[k][k2]/float(tenant_run[k][k2])) print print #print", "int_checks = (tenants * tenants * priv * (priv -", "tenants * priv * (priv - 1)) / 2 int_time", "(fnames): tenant_time = defaultdict(lambda: defaultdict(lambda: 0.0)) tenant_run = defaultdict(lambda: defaultdict(lambda:0))", "tenant_run[k][k2], \\ tenant_time[k][k2]/float(tenant_run[k][k2])) print print #print \"%d %d %f\"%(k, runs[k],", "open(fname) for l in f: if l.startswith(\"tenant\"): continue parts =", "if l.startswith(\"tenant\"): continue parts = l.strip().split() tenants = int(parts[0]) priv", "oext_time tenant_time[(priv, pub)][tenants] += total tenant_run[(priv, pub)][tenants] += 1 for", "/ 2 int_time = int_checks * float(parts[3]) ext_checks = (tenants", "\"# ----%s------\"%(str(k)) for k2 in sorted(tenant_run[k].keys()): print \"%d %d %f\"%(k2,", "* float(parts[4]) oext_check = (tenants * priv) * (tenants *", "l in f: if l.startswith(\"tenant\"): continue parts = l.strip().split() tenants", "sorted(tenant_run.keys()): print \"# ----%s------\"%(str(k)) for k2 in sorted(tenant_run[k].keys()): print \"%d", "num_machines = tenants * priv * pub int_checks = (tenants", "oext_time = oext_check * float(parts[5]) total = int_time + ext_time", "1) * pub) ext_time = ext_checks * float(parts[4]) oext_check =", "priv * pub int_checks = (tenants * tenants * priv", "int(parts[2]) num_machines = tenants * priv * pub int_checks =", "= int_time + ext_time + oext_time tenant_time[(priv, pub)][tenants] += total", "----%s------\"%(str(k)) for k2 in sorted(tenant_run[k].keys()): print \"%d %d %f\"%(k2, tenant_run[k][k2],", "\\ tenant_time[k][k2]/float(tenant_run[k][k2])) print print #print \"%d %d %f\"%(k, runs[k], 
machines[k]/float(runs[k]))", "for k2 in sorted(tenant_run[k].keys()): print \"%d %d %f\"%(k2, tenant_run[k][k2], \\", "pub = int(parts[2]) num_machines = tenants * priv * pub", "* (priv - 1)) / 2 int_time = int_checks *", "ext_time + oext_time tenant_time[(priv, pub)][tenants] += total tenant_run[(priv, pub)][tenants] +=", "pub int_checks = (tenants * tenants * priv * (priv", "= oext_check * float(parts[5]) total = int_time + ext_time +", "total tenant_run[(priv, pub)][tenants] += 1 for k in sorted(tenant_run.keys()): print", "= l.strip().split() tenants = int(parts[0]) priv = int(parts[1]) pub =", "0.0)) tenant_run = defaultdict(lambda: defaultdict(lambda:0)) for fname in fnames: f", "tenant_time[(priv, pub)][tenants] += total tenant_run[(priv, pub)][tenants] += 1 for k", "ext_checks * float(parts[4]) oext_check = (tenants * priv) * (tenants", "* priv) * (tenants * pub) oext_time = oext_check *", "int(parts[1]) pub = int(parts[2]) num_machines = tenants * priv *", "in sorted(tenant_run[k].keys()): print \"%d %d %f\"%(k2, tenant_run[k][k2], \\ tenant_time[k][k2]/float(tenant_run[k][k2])) print", "sys from collections import defaultdict def Process (fnames): tenant_time =", "k2 in sorted(tenant_run[k].keys()): print \"%d %d %f\"%(k2, tenant_run[k][k2], \\ tenant_time[k][k2]/float(tenant_run[k][k2]))", "in sorted(tenant_run.keys()): print \"# ----%s------\"%(str(k)) for k2 in sorted(tenant_run[k].keys()): print", "%d %f\"%(k2, tenant_run[k][k2], \\ tenant_time[k][k2]/float(tenant_run[k][k2])) print print #print \"%d %d", "* tenants * priv * (priv - 1)) / 2", "1 for k in sorted(tenant_run.keys()): print \"# ----%s------\"%(str(k)) for k2", "tenant_time = defaultdict(lambda: defaultdict(lambda: 0.0)) tenant_run = defaultdict(lambda: defaultdict(lambda:0)) for", "* (tenants * pub) oext_time = oext_check * float(parts[5]) total", "* priv * (priv - 1)) / 2 int_time =", "int_time = int_checks * float(parts[3]) ext_checks = (tenants * priv)", "def Process (fnames): 
tenant_time = defaultdict(lambda: defaultdict(lambda: 0.0)) tenant_run =", "ext_time = ext_checks * float(parts[4]) oext_check = (tenants * priv)", "int_checks * float(parts[3]) ext_checks = (tenants * priv) * ((tenants", "= int(parts[0]) priv = int(parts[1]) pub = int(parts[2]) num_machines =", "for k in sorted(tenant_run.keys()): print \"# ----%s------\"%(str(k)) for k2 in", "collections import defaultdict def Process (fnames): tenant_time = defaultdict(lambda: defaultdict(lambda:", "2 int_time = int_checks * float(parts[3]) ext_checks = (tenants *", "sorted(tenant_run[k].keys()): print \"%d %d %f\"%(k2, tenant_run[k][k2], \\ tenant_time[k][k2]/float(tenant_run[k][k2])) print print", "- 1)) / 2 int_time = int_checks * float(parts[3]) ext_checks", "= (tenants * priv) * (tenants * pub) oext_time =", "pub) oext_time = oext_check * float(parts[5]) total = int_time +", "oext_check = (tenants * priv) * (tenants * pub) oext_time" ]
[ "\"\"\" try: disable_mpi_env = os.environ['DISABLE_MPI'] disable_mpi = True if disable_mpi_env.lower().strip()", "as np import os,sys,time \"\"\" Copied from orphics.mpi \"\"\" try:", "MPI implementation \"\"\" def __init__(self): pass def Get_rank(self): return 0", "thousands of # wasted CPU hours # def cleanup(type, value,", "to the last set of cores (so that rank 0", "back to fake MPI. This means that if you submitted", "pass MPI = template() MPI.COMM_WORLD = fakeMpiComm() def mpi_distribute(num_tasks,avail_cores,allow_empty=False): #", "from orphics.mpi \"\"\" try: disable_mpi_env = os.environ['DISABLE_MPI'] disable_mpi = True", "task_dist = [task_range[x:y] for x,y in zip([0]+cumul[:-1],cumul)] # a list", "pass try: if disable_mpi: raise from mpi4py import MPI except:", "task task_dist = [task_range[x:y] for x,y in zip([0]+cumul[:-1],cumul)] # a", "Sigurd's enlib.mpi: # Uncaught exceptions don't cause mpi to abort.", "the end indices for each task task_dist = [task_range[x:y] for", "= fakeMpiComm() def mpi_distribute(num_tasks,avail_cores,allow_empty=False): # copied to mapsims.convert_noise_templates if not(allow_empty):", "extra jobs) task_range = list(range(num_tasks)) # the full range of", "rank = comm.Get_rank() numcores = comm.Get_size() num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs) if", "cleanup(type, value, traceback): # sys.__excepthook__(type, value, traceback) # MPI.COMM_WORLD.Abort(1) #", "print (\"At most \", max(num_each) , \" tasks...\") my_tasks =", "assert avail_cores<=num_tasks min_each, rem = divmod(num_tasks,avail_cores) num_each = np.array([min_each]*avail_cores) #", "cause mpi to abort. This can lead to thousands of", "\"\"\" A Simple Fake MPI implementation \"\"\" def __init__(self): pass", "full range of tasks cumul = np.cumsum(num_each).tolist() # the end", "indices for each task task_dist = [task_range[x:y] for x,y in", "mpi to abort. 
This can lead to thousands of #", "tasks for each core assert sum(num_each)==num_tasks assert len(num_each)==avail_cores assert len(task_dist)==avail_cores", "implementation \"\"\" def __init__(self): pass def Get_rank(self): return 0 def", "This means that if you submitted multiple processes, they will", "intel-mpi! If you use it on openmpi, you will have", "distribute equally if rem>0: num_each[-rem:] += 1 # add the", "False except: disable_mpi = False \"\"\" Use the below cleanup", "import MPI except: if not(disable_mpi): print(\"WARNING: mpi4py could not be", "except: disable_mpi = False \"\"\" Use the below cleanup stuff", "cumul = np.cumsum(num_each).tolist() # the end indices for each task", "a list containing the tasks for each core assert sum(num_each)==num_tasks", "len(num_each)==avail_cores assert len(task_dist)==avail_cores return num_each,task_dist def distribute(njobs,verbose=True,**kwargs): comm = MPI.COMM_WORLD", "don't cause mpi to abort. This can lead to thousands", "= list(range(num_tasks)) # the full range of tasks cumul =", "MPI.COMM_WORLD = fakeMpiComm() def mpi_distribute(num_tasks,avail_cores,allow_empty=False): # copied to mapsims.convert_noise_templates if", "for each core assert sum(num_each)==num_tasks assert len(num_each)==avail_cores assert len(task_dist)==avail_cores return", "= MPI.COMM_WORLD rank = comm.Get_rank() numcores = comm.Get_size() num_each,each_tasks =", "= os.environ['DISABLE_MPI'] disable_mpi = True if disable_mpi_env.lower().strip() == \"true\" else", "From Sigurd's enlib.mpi: # Uncaught exceptions don't cause mpi to", "that if you submitted multiple processes, they will all be", "= cleanup class fakeMpiComm: \"\"\" A Simple Fake MPI implementation", "on openmpi, you will have no traceback for errors causing", "= comm.Get_rank() numcores = comm.Get_size() num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs) if rank==0:", "= True if disable_mpi_env.lower().strip() == \"true\" else False except: disable_mpi", 
"rem = divmod(num_tasks,avail_cores) num_each = np.array([min_each]*avail_cores) # first distribute equally", "traceback) # MPI.COMM_WORLD.Abort(1) # sys.excepthook = cleanup class fakeMpiComm: \"\"\"", "that rank 0 never gets extra jobs) task_range = list(range(num_tasks))", "min_each, rem = divmod(num_tasks,avail_cores) num_each = np.array([min_each]*avail_cores) # first distribute", "each task task_dist = [task_range[x:y] for x,y in zip([0]+cumul[:-1],cumul)] #", "try: disable_mpi_env = os.environ['DISABLE_MPI'] disable_mpi = True if disable_mpi_env.lower().strip() ==", "can lead to thousands of # wasted CPU hours #", "len(task_dist)==avail_cores return num_each,task_dist def distribute(njobs,verbose=True,**kwargs): comm = MPI.COMM_WORLD rank =", "copied to mapsims.convert_noise_templates if not(allow_empty): assert avail_cores<=num_tasks min_each, rem =", "os,sys,time \"\"\" Copied from orphics.mpi \"\"\" try: disable_mpi_env = os.environ['DISABLE_MPI']", "except: if not(disable_mpi): print(\"WARNING: mpi4py could not be loaded. Falling", "(\"At most \", max(num_each) , \" tasks...\") my_tasks = each_tasks[rank]", "disable_mpi_env = os.environ['DISABLE_MPI'] disable_mpi = True if disable_mpi_env.lower().strip() == \"true\"", "False \"\"\" Use the below cleanup stuff only for intel-mpi!", "else False except: disable_mpi = False \"\"\" Use the below", "for intel-mpi! If you use it on openmpi, you will", "disable_mpi = False \"\"\" Use the below cleanup stuff only", "of # wasted CPU hours # def cleanup(type, value, traceback):", "print(\"WARNING: mpi4py could not be loaded. 
Falling back to fake", "raise from mpi4py import MPI except: if not(disable_mpi): print(\"WARNING: mpi4py", "= template() MPI.COMM_WORLD = fakeMpiComm() def mpi_distribute(num_tasks,avail_cores,allow_empty=False): # copied to", "# the full range of tasks cumul = np.cumsum(num_each).tolist() #", "wasted CPU hours # def cleanup(type, value, traceback): # sys.__excepthook__(type,", "will have no traceback for errors causing hours of endless", "\"\"\" # From Sigurd's enlib.mpi: # Uncaught exceptions don't cause", "disable_mpi_env.lower().strip() == \"true\" else False except: disable_mpi = False \"\"\"", "try: if disable_mpi: raise from mpi4py import MPI except: if", "pass def Get_rank(self): return 0 def Get_size(self): return 1 def", "if not(allow_empty): assert avail_cores<=num_tasks min_each, rem = divmod(num_tasks,avail_cores) num_each =", "\"\"\" Copied from orphics.mpi \"\"\" try: disable_mpi_env = os.environ['DISABLE_MPI'] disable_mpi", "of endless confusion and frustration! - Sincerely, past frustrated Mat", "= comm.Get_size() num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs) if rank==0: print (\"At most", "the last set of cores (so that rank 0 never", "assert sum(num_each)==num_tasks assert len(num_each)==avail_cores assert len(task_dist)==avail_cores return num_each,task_dist def distribute(njobs,verbose=True,**kwargs):", "MPI except: if not(disable_mpi): print(\"WARNING: mpi4py could not be loaded.", "thing.\") class template: pass MPI = template() MPI.COMM_WORLD = fakeMpiComm()", "no traceback for errors causing hours of endless confusion and", "value, traceback) # MPI.COMM_WORLD.Abort(1) # sys.excepthook = cleanup class fakeMpiComm:", "set of cores (so that rank 0 never gets extra", "cleanup class fakeMpiComm: \"\"\" A Simple Fake MPI implementation \"\"\"", "to fake MPI. This means that if you submitted multiple", "# Uncaught exceptions don't cause mpi to abort. 
This can", "and they are potentially doing the same thing.\") class template:", "MPI. This means that if you submitted multiple processes, they", "import print_function import numpy as np import os,sys,time \"\"\" Copied", "1 # add the remainder to the last set of", "from mpi4py import MPI except: if not(disable_mpi): print(\"WARNING: mpi4py could", "you will have no traceback for errors causing hours of", "tasks cumul = np.cumsum(num_each).tolist() # the end indices for each", "MPI.COMM_WORLD rank = comm.Get_rank() numcores = comm.Get_size() num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs)", "return 1 def Barrier(self): pass def Abort(self,dummy): pass try: if", "multiple processes, they will all be assigned the same rank", "os.environ['DISABLE_MPI'] disable_mpi = True if disable_mpi_env.lower().strip() == \"true\" else False", "print_function import numpy as np import os,sys,time \"\"\" Copied from", "= divmod(num_tasks,avail_cores) num_each = np.array([min_each]*avail_cores) # first distribute equally if", "remainder to the last set of cores (so that rank", "[task_range[x:y] for x,y in zip([0]+cumul[:-1],cumul)] # a list containing the", "This can lead to thousands of # wasted CPU hours", "same rank of 0, and they are potentially doing the", "value, traceback): # sys.__excepthook__(type, value, traceback) # MPI.COMM_WORLD.Abort(1) # sys.excepthook", "containing the tasks for each core assert sum(num_each)==num_tasks assert len(num_each)==avail_cores", "mpi_distribute(njobs,numcores,**kwargs) if rank==0: print (\"At most \", max(num_each) , \"", "first distribute equally if rem>0: num_each[-rem:] += 1 # add", "# wasted CPU hours # def cleanup(type, value, traceback): #", "Barrier(self): pass def Abort(self,dummy): pass try: if disable_mpi: raise from", "openmpi, you will have no traceback for errors causing hours", "import os,sys,time \"\"\" Copied from orphics.mpi \"\"\" try: disable_mpi_env =", "0 def Get_size(self): return 1 def Barrier(self): 
pass def Abort(self,dummy):", "they are potentially doing the same thing.\") class template: pass", "endless confusion and frustration! - Sincerely, past frustrated Mat \"\"\"", "def cleanup(type, value, traceback): # sys.__excepthook__(type, value, traceback) # MPI.COMM_WORLD.Abort(1)", "return 0 def Get_size(self): return 1 def Barrier(self): pass def", "rem>0: num_each[-rem:] += 1 # add the remainder to the", "task_range = list(range(num_tasks)) # the full range of tasks cumul", "will all be assigned the same rank of 0, and", "num_each[-rem:] += 1 # add the remainder to the last", "to abort. This can lead to thousands of # wasted", "__future__ import print_function import numpy as np import os,sys,time \"\"\"", "__init__(self): pass def Get_rank(self): return 0 def Get_size(self): return 1", "not be loaded. Falling back to fake MPI. This means", "def Get_size(self): return 1 def Barrier(self): pass def Abort(self,dummy): pass", "Get_rank(self): return 0 def Get_size(self): return 1 def Barrier(self): pass", "np.array([min_each]*avail_cores) # first distribute equally if rem>0: num_each[-rem:] += 1", "num_each,task_dist def distribute(njobs,verbose=True,**kwargs): comm = MPI.COMM_WORLD rank = comm.Get_rank() numcores", "Falling back to fake MPI. This means that if you", "cleanup stuff only for intel-mpi! If you use it on", "disable_mpi = True if disable_mpi_env.lower().strip() == \"true\" else False except:", "class fakeMpiComm: \"\"\" A Simple Fake MPI implementation \"\"\" def", "core assert sum(num_each)==num_tasks assert len(num_each)==avail_cores assert len(task_dist)==avail_cores return num_each,task_dist def", "A Simple Fake MPI implementation \"\"\" def __init__(self): pass def", "fake MPI. 
This means that if you submitted multiple processes,", "def Barrier(self): pass def Abort(self,dummy): pass try: if disable_mpi: raise", "have no traceback for errors causing hours of endless confusion", "assigned the same rank of 0, and they are potentially", "# def cleanup(type, value, traceback): # sys.__excepthook__(type, value, traceback) #", "# first distribute equally if rem>0: num_each[-rem:] += 1 #", "np.cumsum(num_each).tolist() # the end indices for each task task_dist =", "x,y in zip([0]+cumul[:-1],cumul)] # a list containing the tasks for", "def Get_rank(self): return 0 def Get_size(self): return 1 def Barrier(self):", "for each task task_dist = [task_range[x:y] for x,y in zip([0]+cumul[:-1],cumul)]", "= False \"\"\" Use the below cleanup stuff only for", "cores (so that rank 0 never gets extra jobs) task_range", "# sys.__excepthook__(type, value, traceback) # MPI.COMM_WORLD.Abort(1) # sys.excepthook = cleanup", "0 never gets extra jobs) task_range = list(range(num_tasks)) # the", "if disable_mpi_env.lower().strip() == \"true\" else False except: disable_mpi = False", "fakeMpiComm() def mpi_distribute(num_tasks,avail_cores,allow_empty=False): # copied to mapsims.convert_noise_templates if not(allow_empty): assert", "CPU hours # def cleanup(type, value, traceback): # sys.__excepthook__(type, value,", "you submitted multiple processes, they will all be assigned the", "mpi_distribute(num_tasks,avail_cores,allow_empty=False): # copied to mapsims.convert_noise_templates if not(allow_empty): assert avail_cores<=num_tasks min_each,", "list containing the tasks for each core assert sum(num_each)==num_tasks assert", "Uncaught exceptions don't cause mpi to abort. 
This can lead", "of 0, and they are potentially doing the same thing.\")", "Fake MPI implementation \"\"\" def __init__(self): pass def Get_rank(self): return", "orphics.mpi \"\"\" try: disable_mpi_env = os.environ['DISABLE_MPI'] disable_mpi = True if", "to thousands of # wasted CPU hours # def cleanup(type,", "are potentially doing the same thing.\") class template: pass MPI", "to mapsims.convert_noise_templates if not(allow_empty): assert avail_cores<=num_tasks min_each, rem = divmod(num_tasks,avail_cores)", "all be assigned the same rank of 0, and they", "sys.__excepthook__(type, value, traceback) # MPI.COMM_WORLD.Abort(1) # sys.excepthook = cleanup class", "If you use it on openmpi, you will have no", "causing hours of endless confusion and frustration! - Sincerely, past", "gets extra jobs) task_range = list(range(num_tasks)) # the full range", "add the remainder to the last set of cores (so", "jobs) task_range = list(range(num_tasks)) # the full range of tasks", "# a list containing the tasks for each core assert", "mapsims.convert_noise_templates if not(allow_empty): assert avail_cores<=num_tasks min_each, rem = divmod(num_tasks,avail_cores) num_each", "use it on openmpi, you will have no traceback for", "0, and they are potentially doing the same thing.\") class", "be assigned the same rank of 0, and they are", "template: pass MPI = template() MPI.COMM_WORLD = fakeMpiComm() def mpi_distribute(num_tasks,avail_cores,allow_empty=False):", "rank==0: print (\"At most \", max(num_each) , \" tasks...\") my_tasks", "Mat \"\"\" # From Sigurd's enlib.mpi: # Uncaught exceptions don't", "for x,y in zip([0]+cumul[:-1],cumul)] # a list containing the tasks", "it on openmpi, you will have no traceback for errors", "frustrated Mat \"\"\" # From Sigurd's enlib.mpi: # Uncaught exceptions", "doing the same thing.\") class template: pass MPI = template()", "traceback): # sys.__excepthook__(type, value, traceback) # MPI.COMM_WORLD.Abort(1) # sys.excepthook =", "they will all be 
assigned the same rank of 0,", "\"\"\" def __init__(self): pass def Get_rank(self): return 0 def Get_size(self):", "def mpi_distribute(num_tasks,avail_cores,allow_empty=False): # copied to mapsims.convert_noise_templates if not(allow_empty): assert avail_cores<=num_tasks", "= mpi_distribute(njobs,numcores,**kwargs) if rank==0: print (\"At most \", max(num_each) ,", "Get_size(self): return 1 def Barrier(self): pass def Abort(self,dummy): pass try:", "import numpy as np import os,sys,time \"\"\" Copied from orphics.mpi", "never gets extra jobs) task_range = list(range(num_tasks)) # the full", "= [task_range[x:y] for x,y in zip([0]+cumul[:-1],cumul)] # a list containing", "of cores (so that rank 0 never gets extra jobs)", "if not(disable_mpi): print(\"WARNING: mpi4py could not be loaded. Falling back", "+= 1 # add the remainder to the last set", "disable_mpi: raise from mpi4py import MPI except: if not(disable_mpi): print(\"WARNING:", "list(range(num_tasks)) # the full range of tasks cumul = np.cumsum(num_each).tolist()", "assert len(task_dist)==avail_cores return num_each,task_dist def distribute(njobs,verbose=True,**kwargs): comm = MPI.COMM_WORLD rank", "you use it on openmpi, you will have no traceback", "range of tasks cumul = np.cumsum(num_each).tolist() # the end indices", "not(disable_mpi): print(\"WARNING: mpi4py could not be loaded. Falling back to", "divmod(num_tasks,avail_cores) num_each = np.array([min_each]*avail_cores) # first distribute equally if rem>0:", "from __future__ import print_function import numpy as np import os,sys,time", "confusion and frustration! 
- Sincerely, past frustrated Mat \"\"\" #", "# sys.excepthook = cleanup class fakeMpiComm: \"\"\" A Simple Fake", "class template: pass MPI = template() MPI.COMM_WORLD = fakeMpiComm() def", "sys.excepthook = cleanup class fakeMpiComm: \"\"\" A Simple Fake MPI", "def __init__(self): pass def Get_rank(self): return 0 def Get_size(self): return", "= np.array([min_each]*avail_cores) # first distribute equally if rem>0: num_each[-rem:] +=", "= np.cumsum(num_each).tolist() # the end indices for each task task_dist", "the remainder to the last set of cores (so that", "def distribute(njobs,verbose=True,**kwargs): comm = MPI.COMM_WORLD rank = comm.Get_rank() numcores =", "avail_cores<=num_tasks min_each, rem = divmod(num_tasks,avail_cores) num_each = np.array([min_each]*avail_cores) # first", "the same rank of 0, and they are potentially doing", "1 def Barrier(self): pass def Abort(self,dummy): pass try: if disable_mpi:", "only for intel-mpi! If you use it on openmpi, you", "for errors causing hours of endless confusion and frustration! -", "\"true\" else False except: disable_mpi = False \"\"\" Use the", "pass def Abort(self,dummy): pass try: if disable_mpi: raise from mpi4py", "loaded. Falling back to fake MPI. This means that if", "processes, they will all be assigned the same rank of", "num_each = np.array([min_each]*avail_cores) # first distribute equally if rem>0: num_each[-rem:]", "errors causing hours of endless confusion and frustration! - Sincerely,", "same thing.\") class template: pass MPI = template() MPI.COMM_WORLD =", "each core assert sum(num_each)==num_tasks assert len(num_each)==avail_cores assert len(task_dist)==avail_cores return num_each,task_dist", "could not be loaded. Falling back to fake MPI. 
This", "if disable_mpi: raise from mpi4py import MPI except: if not(disable_mpi):", "the same thing.\") class template: pass MPI = template() MPI.COMM_WORLD", "== \"true\" else False except: disable_mpi = False \"\"\" Use", "not(allow_empty): assert avail_cores<=num_tasks min_each, rem = divmod(num_tasks,avail_cores) num_each = np.array([min_each]*avail_cores)", "distribute(njobs,verbose=True,**kwargs): comm = MPI.COMM_WORLD rank = comm.Get_rank() numcores = comm.Get_size()", "below cleanup stuff only for intel-mpi! If you use it", "# copied to mapsims.convert_noise_templates if not(allow_empty): assert avail_cores<=num_tasks min_each, rem", "submitted multiple processes, they will all be assigned the same", "the below cleanup stuff only for intel-mpi! If you use", "end indices for each task task_dist = [task_range[x:y] for x,y", "if rank==0: print (\"At most \", max(num_each) , \" tasks...\")", "Copied from orphics.mpi \"\"\" try: disable_mpi_env = os.environ['DISABLE_MPI'] disable_mpi =", "fakeMpiComm: \"\"\" A Simple Fake MPI implementation \"\"\" def __init__(self):", "return num_each,task_dist def distribute(njobs,verbose=True,**kwargs): comm = MPI.COMM_WORLD rank = comm.Get_rank()", "most \", max(num_each) , \" tasks...\") my_tasks = each_tasks[rank] return", "- Sincerely, past frustrated Mat \"\"\" # From Sigurd's enlib.mpi:", "# the end indices for each task task_dist = [task_range[x:y]", "of tasks cumul = np.cumsum(num_each).tolist() # the end indices for", "exceptions don't cause mpi to abort. This can lead to", "\"\"\" Use the below cleanup stuff only for intel-mpi! If", "be loaded. Falling back to fake MPI. 
This means that", "rank 0 never gets extra jobs) task_range = list(range(num_tasks)) #", "\", max(num_each) , \" tasks...\") my_tasks = each_tasks[rank] return comm,rank,my_tasks", "means that if you submitted multiple processes, they will all", "MPI = template() MPI.COMM_WORLD = fakeMpiComm() def mpi_distribute(num_tasks,avail_cores,allow_empty=False): # copied", "lead to thousands of # wasted CPU hours # def", "(so that rank 0 never gets extra jobs) task_range =", "True if disable_mpi_env.lower().strip() == \"true\" else False except: disable_mpi =", "# add the remainder to the last set of cores", "comm = MPI.COMM_WORLD rank = comm.Get_rank() numcores = comm.Get_size() num_each,each_tasks", "template() MPI.COMM_WORLD = fakeMpiComm() def mpi_distribute(num_tasks,avail_cores,allow_empty=False): # copied to mapsims.convert_noise_templates", "numcores = comm.Get_size() num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs) if rank==0: print (\"At", "in zip([0]+cumul[:-1],cumul)] # a list containing the tasks for each", "zip([0]+cumul[:-1],cumul)] # a list containing the tasks for each core", "Use the below cleanup stuff only for intel-mpi! If you", "comm.Get_size() num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs) if rank==0: print (\"At most \",", "traceback for errors causing hours of endless confusion and frustration!", "enlib.mpi: # Uncaught exceptions don't cause mpi to abort. This", "MPI.COMM_WORLD.Abort(1) # sys.excepthook = cleanup class fakeMpiComm: \"\"\" A Simple", "last set of cores (so that rank 0 never gets", "numpy as np import os,sys,time \"\"\" Copied from orphics.mpi \"\"\"", "if rem>0: num_each[-rem:] += 1 # add the remainder to", "hours of endless confusion and frustration! 
- Sincerely, past frustrated", "equally if rem>0: num_each[-rem:] += 1 # add the remainder", "num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs) if rank==0: print (\"At most \", max(num_each)", "assert len(num_each)==avail_cores assert len(task_dist)==avail_cores return num_each,task_dist def distribute(njobs,verbose=True,**kwargs): comm =", "the full range of tasks cumul = np.cumsum(num_each).tolist() # the", "stuff only for intel-mpi! If you use it on openmpi,", "if you submitted multiple processes, they will all be assigned", "comm.Get_rank() numcores = comm.Get_size() num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs) if rank==0: print", "Abort(self,dummy): pass try: if disable_mpi: raise from mpi4py import MPI", "frustration! - Sincerely, past frustrated Mat \"\"\" # From Sigurd's", "potentially doing the same thing.\") class template: pass MPI =", "sum(num_each)==num_tasks assert len(num_each)==avail_cores assert len(task_dist)==avail_cores return num_each,task_dist def distribute(njobs,verbose=True,**kwargs): comm", "np import os,sys,time \"\"\" Copied from orphics.mpi \"\"\" try: disable_mpi_env", "hours # def cleanup(type, value, traceback): # sys.__excepthook__(type, value, traceback)", "rank of 0, and they are potentially doing the same", "and frustration! - Sincerely, past frustrated Mat \"\"\" # From", "def Abort(self,dummy): pass try: if disable_mpi: raise from mpi4py import", "Sincerely, past frustrated Mat \"\"\" # From Sigurd's enlib.mpi: #", "mpi4py could not be loaded. Falling back to fake MPI.", "Simple Fake MPI implementation \"\"\" def __init__(self): pass def Get_rank(self):", "# From Sigurd's enlib.mpi: # Uncaught exceptions don't cause mpi", "abort. 
This can lead to thousands of # wasted CPU", "the tasks for each core assert sum(num_each)==num_tasks assert len(num_each)==avail_cores assert", "# MPI.COMM_WORLD.Abort(1) # sys.excepthook = cleanup class fakeMpiComm: \"\"\" A", "mpi4py import MPI except: if not(disable_mpi): print(\"WARNING: mpi4py could not", "past frustrated Mat \"\"\" # From Sigurd's enlib.mpi: # Uncaught" ]
[ "self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, wizards gatherer error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() urllib.request.urlretrieve(card_image,", "+ card_name) print('Set: ' + card_set) print('Set name: ' +", "self.get_multiverse_id(name) r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id)) r = json.loads(r.text) r", "= r.get('price_normal') us_card_price = r.get('us_normal') card_set = r.get('set_id') card_set_name =", "configparser import json from PIL import Image from ebaysdk.trading import", "r.get('multiverse_id') # Display card info in CLI print('Name: ' +", "' + card_name) print('Set: ' + card_set) print('Set name: '", "def __init__(self): self.api = Trading() config = configparser.ConfigParser() config.read('config.ini') with", "= QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, scryfall error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec()", "files=files) except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, PictShare", "self.msg.setText(\"Upload Failed, scryfall error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() def get_card_info_and_sell(self, name): try:", "self.yaml_config[\"PayPalEmailAddress\"], 'PrimaryCategory': {'CategoryID': '38292'}, 'ShippingDetails': {'ShippingType': 'Flat', 'ShippingServiceOptions': {'ShippingServicePriority': '1',", "name = re.sub(' ', '%20', name) r = requests.get('https://api.scryfall.com/cards/named?exact=' +", "Resize card base_height = 500 img = Image.open('temp.jpg') height_percent =", "re import configparser import json from PIL import Image from", "us_card_price = r.get('us_normal') card_set = r.get('set_id') card_set_name = r.get('set_name') card_id", "self.msg.setText(\"Upload Failed, wizards gatherer error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() 
urllib.request.urlretrieve(card_image, 'temp.jpg') #", "= img.resize((wsize, base_height), PIL.Image.ANTIALIAS) img.save('temp.png') # Upload to PictShare files", "r.get('us_normal') card_set = r.get('set_id') card_set_name = r.get('set_name') card_id = r.get('multiverse_id')", "Complete, please check log.txt\") self.msg.setStandardButtons(QMessageBox.Ok) with open('log.txt', 'a+') as log_file:", "QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, PictShare error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() print(r)", "= Trading() config = configparser.ConfigParser() config.read('config.ini') with open('details.yaml', 'r') as", "= image.dict() image = image['SiteHostedPictureDetails']['FullURL'] print(image) # Upload to ebay", "us_card_price, card_id): if us_card_price != 0: card_price = us_card_price *", "self.yaml_config = load(file) def upload_card(self, card_name, eu_card_price, us_card_price, card_id): if", "'1', 'ShippingService': self.yaml_config[ \"ShippingService\"], 'ShippingServiceCost': '1'}}}}) print(response.dict()) print(response.reply) self.msg =", "card info in CLI print('Name: ' + card_name) print('Set: '", "= load(file) def upload_card(self, card_name, eu_card_price, us_card_price, card_id): if us_card_price", "'PaymentMethods': 'PayPal', 'PayPalEmailAddress': self.yaml_config[\"PayPalEmailAddress\"], 'PrimaryCategory': {'CategoryID': '38292'}, 'ShippingDetails': {'ShippingType': 'Flat',", "= re.sub('\\\\.net//', '.net/', r) print(r) try: image = self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL':", "self.msg.setText(\"Upload Complete, please check your ebay account to confirm\") self.msg.setStandardButtons(QMessageBox.Ok)", "{'file': open('temp.png', 'rb')} try: r = requests.post('https://pictshare.net/api/upload.php', files=files) except: self.msg", "load from PyQt5.QtWidgets import QMessageBox class EbaySeller: def __init__(self): self.api", "e: print(e) 
print(e.response.dict()) def get_multiverse_id(self, name): try: name = re.sub('", "{'Title': card_name + ' MTG - NM/M', 'Description': card_name +", "scryfall error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() def get_card_info_and_sell(self, name): try: multiverse_id =", "self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, wizards gatherer error\")", "MTG - NM/M', 'Quantity': '1', 'PictureDetails': {'PictureURL': image}, 'ReturnPolicy': {'ReturnsAcceptedOption':", "card_price = 1 card_price = str(round(card_price, 2)) try: card_image =", "expression, may not be needed at a later date r", "r.get('price_normal') us_card_price = r.get('us_normal') card_set = r.get('set_id') card_set_name = r.get('set_name')", "if card_price < 1: card_price = 1 card_price = str(round(card_price,", "- NM/M', 'Description': card_name + ' MTG - NM/M', 'Quantity':", "str(multiverse_id)) r = json.loads(r.text) r = r[0] card_name = r.get('name')", "r}) image = image.dict() image = image['SiteHostedPictureDetails']['FullURL'] print(image) # Upload", "may not be needed at a later date r =", "at a later date r = re.sub('\\\\.net', '.net/', r) r", "try: multiverse_id = self.get_multiverse_id(name) r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id)) r", "to PictShare files = {'file': open('temp.png', 'rb')} try: r =", "'ReturnsNotAccepted'}, 'DispatchTimeMax': '3', 'ConditionID': '1000', 'StartPrice': card_price, 'PostalCode': self.yaml_config[\"PostalCode\"], 'Currency':", "= configparser.ConfigParser() config.read('config.ini') with open('details.yaml', 'r') as file: self.yaml_config =", "(base_height / float(img.size[1])) wsize = int((float(img.size[0]) * float(height_percent))) img =", "0.8 else: card_price = eu_card_price if card_price < 1: card_price", "card_set_name) print('Card ID: ' + str(card_id)) self.upload_card(card_name, eu_card_price, us_card_price, card_id)", 
"self.msg.exec() urllib.request.urlretrieve(card_image, 'temp.jpg') # Resize card base_height = 500 img", "json.loads(r.text) return r['multiverse_ids'][0] except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload", "0: card_price = us_card_price * 0.8 else: card_price = eu_card_price", "1 card_price = str(round(card_price, 2)) try: card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' +", "print('Set: ' + card_set) print('Set name: ' + card_set_name) print('Card", "self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() print(r) r = r.text r = json.loads(r) print(r)", "QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, card name not valid\") self.msg.setStandardButtons(QMessageBox.Ok)", "Failed, wizards gatherer error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() urllib.request.urlretrieve(card_image, 'temp.jpg') # Resize", "'GB', 'ListingDuration': 'Days_30', 'PaymentMethods': 'PayPal', 'PayPalEmailAddress': self.yaml_config[\"PayPalEmailAddress\"], 'PrimaryCategory': {'CategoryID': '38292'},", "'PictureDetails': {'PictureURL': image}, 'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax': '3', 'ConditionID': '1000',", "upload_card(self, card_name, eu_card_price, us_card_price, card_id): if us_card_price != 0: card_price", "Image.open('temp.jpg') height_percent = (base_height / float(img.size[1])) wsize = int((float(img.size[0]) *", "card_id = r.get('multiverse_id') # Display card info in CLI print('Name:", "print('Card ID: ' + str(card_id)) self.upload_card(card_name, eu_card_price, us_card_price, card_id) except:", "= self.api.execute('AddFixedPriceItem', { 'Item': {'Title': card_name + ' MTG -", "requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id)) r = json.loads(r.text) r = r[0] card_name", "'.net/', r) r = re.sub('\\\\.net//', '.net/', r) print(r) try: image", 
"self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, PictShare error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() print(r) r", "import load from PyQt5.QtWidgets import QMessageBox class EbaySeller: def __init__(self):", "card_price = str(round(card_price, 2)) try: card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id", "eu_card_price, us_card_price, card_id): if us_card_price != 0: card_price = us_card_price", "EbaySeller: def __init__(self): self.api = Trading() config = configparser.ConfigParser() config.read('config.ini')", "# Fix using regular expression, may not be needed at", "import configparser import json from PIL import Image from ebaysdk.trading", "r = json.loads(r) print(r) r = r['url'] # Fix using", "def get_multiverse_id(self, name): try: name = re.sub(' ', '%20', name)", "r = re.sub('\\\\.net//', '.net/', r) print(r) try: image = self.api.execute('UploadSiteHostedPictures',", "except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, wizards gatherer", "Connection as Trading from ebaysdk.exception import ConnectionError from yaml import", "str(round(card_price, 2)) try: card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id + '&type=card'", "get_card_info_and_sell(self, name): try: multiverse_id = self.get_multiverse_id(name) r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' +", "not be needed at a later date r = re.sub('\\\\.net',", "gatherer error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() urllib.request.urlretrieve(card_image, 'temp.jpg') # Resize card base_height", "log_file: log_file.write(response.reply) else: self.msg.setWindowTitle(\"Upload Complete\") self.msg.setText(\"Upload Complete, please check your", "as log_file: log_file.write(response.reply) else: self.msg.setWindowTitle(\"Upload Complete\") self.msg.setText(\"Upload Complete, 
please check", "using regular expression, may not be needed at a later", "= r.text r = json.loads(r) print(r) r = r['url'] #", "'Quantity': '1', 'PictureDetails': {'PictureURL': image}, 'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax': '3',", "== 'Failure': self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Complete, please check log.txt\") self.msg.setStandardButtons(QMessageBox.Ok)", "print(r) try: image = self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r}) image = image.dict()", "ebay response = self.api.execute('AddFixedPriceItem', { 'Item': {'Title': card_name + '", "{'CategoryID': '38292'}, 'ShippingDetails': {'ShippingType': 'Flat', 'ShippingServiceOptions': {'ShippingServicePriority': '1', 'ShippingService': self.yaml_config[", "import urllib.request import urllib.parse import PIL import re import configparser", "json.loads(r.text) r = r[0] card_name = r.get('name') eu_card_price = r.get('price_normal')", "print(response.reply) self.msg = QMessageBox() if response.reply.Ack == 'Failure': self.msg.setWindowTitle(\"Upload Failed\")", "import Image from ebaysdk.trading import Connection as Trading from ebaysdk.exception", "r.get('set_name') card_id = r.get('multiverse_id') # Display card info in CLI", "Complete, please check your ebay account to confirm\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec()", "= re.sub('\\\\.net', '.net/', r) r = re.sub('\\\\.net//', '.net/', r) print(r)", "else: self.msg.setWindowTitle(\"Upload Complete\") self.msg.setText(\"Upload Complete, please check your ebay account", "PictShare files = {'file': open('temp.png', 'rb')} try: r = requests.post('https://pictshare.net/api/upload.php',", "ebay account to confirm\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() except ConnectionError as e:", "multiverse_id = self.get_multiverse_id(name) r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id)) 
r =", "r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id)) r = json.loads(r.text) r =", "if response.reply.Ack == 'Failure': self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Complete, please check", "name) r = json.loads(r.text) return r['multiverse_ids'][0] except: self.msg = QMessageBox()", "from yaml import load from PyQt5.QtWidgets import QMessageBox class EbaySeller:", "'Days_30', 'PaymentMethods': 'PayPal', 'PayPalEmailAddress': self.yaml_config[\"PayPalEmailAddress\"], 'PrimaryCategory': {'CategoryID': '38292'}, 'ShippingDetails': {'ShippingType':", "= Image.open('temp.jpg') height_percent = (base_height / float(img.size[1])) wsize = int((float(img.size[0])", "= us_card_price * 0.8 else: card_price = eu_card_price if card_price", "r = requests.get('https://api.scryfall.com/cards/named?exact=' + name) r = json.loads(r.text) return r['multiverse_ids'][0]", "{'ShippingServicePriority': '1', 'ShippingService': self.yaml_config[ \"ShippingService\"], 'ShippingServiceCost': '1'}}}}) print(response.dict()) print(response.reply) self.msg", "'a+') as log_file: log_file.write(response.reply) else: self.msg.setWindowTitle(\"Upload Complete\") self.msg.setText(\"Upload Complete, please", "with open('details.yaml', 'r') as file: self.yaml_config = load(file) def upload_card(self,", "config = configparser.ConfigParser() config.read('config.ini') with open('details.yaml', 'r') as file: self.yaml_config", "import requests import urllib.request import urllib.parse import PIL import re", "= QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, wizards gatherer error\") self.msg.setStandardButtons(QMessageBox.Ok)", "* 0.8 else: card_price = eu_card_price if card_price < 1:", "json.loads(r) print(r) r = r['url'] # Fix using regular expression,", "import re import configparser import json from PIL import Image", "r) print(r) try: image = 
self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r}) image =", "'3', 'ConditionID': '1000', 'StartPrice': card_price, 'PostalCode': self.yaml_config[\"PostalCode\"], 'Currency': self.yaml_config[\"Currency\"], 'Country':", "requests.post('https://pictshare.net/api/upload.php', files=files) except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed,", "PyQt5.QtWidgets import QMessageBox class EbaySeller: def __init__(self): self.api = Trading()", "'Item': {'Title': card_name + ' MTG - NM/M', 'Description': card_name", "'PayPal', 'PayPalEmailAddress': self.yaml_config[\"PayPalEmailAddress\"], 'PrimaryCategory': {'CategoryID': '38292'}, 'ShippingDetails': {'ShippingType': 'Flat', 'ShippingServiceOptions':", "Failed\") self.msg.setText(\"Upload Complete, please check log.txt\") self.msg.setStandardButtons(QMessageBox.Ok) with open('log.txt', 'a+')", "!= 0: card_price = us_card_price * 0.8 else: card_price =", "name): try: multiverse_id = self.get_multiverse_id(name) r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id))", "' + card_set_name) print('Card ID: ' + str(card_id)) self.upload_card(card_name, eu_card_price,", "def upload_card(self, card_name, eu_card_price, us_card_price, card_id): if us_card_price != 0:", "Upload to PictShare files = {'file': open('temp.png', 'rb')} try: r", "'StartPrice': card_price, 'PostalCode': self.yaml_config[\"PostalCode\"], 'Currency': self.yaml_config[\"Currency\"], 'Country': 'GB', 'ListingDuration': 'Days_30',", "from ebaysdk.trading import Connection as Trading from ebaysdk.exception import ConnectionError", "except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, PictShare error\")", "r.get('set_id') card_set_name = r.get('set_name') card_id = r.get('multiverse_id') # Display card", "img = img.resize((wsize, base_height), PIL.Image.ANTIALIAS) img.save('temp.png') # 
Upload to PictShare", "self.yaml_config[\"Currency\"], 'Country': 'GB', 'ListingDuration': 'Days_30', 'PaymentMethods': 'PayPal', 'PayPalEmailAddress': self.yaml_config[\"PayPalEmailAddress\"], 'PrimaryCategory':", "class EbaySeller: def __init__(self): self.api = Trading() config = configparser.ConfigParser()", "self.api = Trading() config = configparser.ConfigParser() config.read('config.ini') with open('details.yaml', 'r')", "r = r[0] card_name = r.get('name') eu_card_price = r.get('price_normal') us_card_price", "'r') as file: self.yaml_config = load(file) def upload_card(self, card_name, eu_card_price,", "img.resize((wsize, base_height), PIL.Image.ANTIALIAS) img.save('temp.png') # Upload to PictShare files =", "to ebay response = self.api.execute('AddFixedPriceItem', { 'Item': {'Title': card_name +", "card_price = eu_card_price if card_price < 1: card_price = 1", "a later date r = re.sub('\\\\.net', '.net/', r) r =", "= (base_height / float(img.size[1])) wsize = int((float(img.size[0]) * float(height_percent))) img", "'ShippingServiceOptions': {'ShippingServicePriority': '1', 'ShippingService': self.yaml_config[ \"ShippingService\"], 'ShippingServiceCost': '1'}}}}) print(response.dict()) print(response.reply)", "'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id + '&type=card' except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload", "= r.get('set_name') card_id = r.get('multiverse_id') # Display card info in", "card_price, 'PostalCode': self.yaml_config[\"PostalCode\"], 'Currency': self.yaml_config[\"Currency\"], 'Country': 'GB', 'ListingDuration': 'Days_30', 'PaymentMethods':", "float(height_percent))) img = img.resize((wsize, base_height), PIL.Image.ANTIALIAS) img.save('temp.png') # Upload to", "image['SiteHostedPictureDetails']['FullURL'] print(image) # Upload to ebay response = self.api.execute('AddFixedPriceItem', {", "log_file.write(response.reply) else: self.msg.setWindowTitle(\"Upload Complete\") 
self.msg.setText(\"Upload Complete, please check your ebay", "MTG - NM/M', 'Description': card_name + ' MTG - NM/M',", "please check your ebay account to confirm\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() except", "confirm\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() except ConnectionError as e: print(e) print(e.response.dict()) def", "+ card_set) print('Set name: ' + card_set_name) print('Card ID: '", "please check log.txt\") self.msg.setStandardButtons(QMessageBox.Ok) with open('log.txt', 'a+') as log_file: log_file.write(response.reply)", "error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() urllib.request.urlretrieve(card_image, 'temp.jpg') # Resize card base_height =", "'rb')} try: r = requests.post('https://pictshare.net/api/upload.php', files=files) except: self.msg = QMessageBox()", "/ float(img.size[1])) wsize = int((float(img.size[0]) * float(height_percent))) img = img.resize((wsize,", "= requests.post('https://pictshare.net/api/upload.php', files=files) except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload", "try: r = requests.post('https://pictshare.net/api/upload.php', files=files) except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload", "try: card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id + '&type=card' except: self.msg", "self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, scryfall error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() def get_card_info_and_sell(self,", "QMessageBox() if response.reply.Ack == 'Failure': self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Complete, please", "'1'}}}}) print(response.dict()) print(response.reply) self.msg = QMessageBox() if response.reply.Ack == 'Failure':", "Upload to ebay response = self.api.execute('AddFixedPriceItem', { 'Item': {'Title': card_name", "card_id) except: self.msg = QMessageBox() 
self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, card", "import Connection as Trading from ebaysdk.exception import ConnectionError from yaml", "file: self.yaml_config = load(file) def upload_card(self, card_name, eu_card_price, us_card_price, card_id):", "{ 'Item': {'Title': card_name + ' MTG - NM/M', 'Description':", "self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() except ConnectionError as e: print(e) print(e.response.dict()) def get_multiverse_id(self,", "ebaysdk.trading import Connection as Trading from ebaysdk.exception import ConnectionError from", "{'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax': '3', 'ConditionID': '1000', 'StartPrice': card_price, 'PostalCode': self.yaml_config[\"PostalCode\"],", "urllib.parse import PIL import re import configparser import json from", "= {'file': open('temp.png', 'rb')} try: r = requests.post('https://pictshare.net/api/upload.php', files=files) except:", "else: card_price = eu_card_price if card_price < 1: card_price =", "PIL.Image.ANTIALIAS) img.save('temp.png') # Upload to PictShare files = {'file': open('temp.png',", "image.dict() image = image['SiteHostedPictureDetails']['FullURL'] print(image) # Upload to ebay response", "self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() urllib.request.urlretrieve(card_image, 'temp.jpg') # Resize card base_height = 500", "__init__(self): self.api = Trading() config = configparser.ConfigParser() config.read('config.ini') with open('details.yaml',", "ID: ' + str(card_id)) self.upload_card(card_name, eu_card_price, us_card_price, card_id) except: self.msg", "self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, scryfall error\") self.msg.setStandardButtons(QMessageBox.Ok)", "error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() def get_card_info_and_sell(self, name): try: multiverse_id = self.get_multiverse_id(name)", "'38292'}, 'ShippingDetails': 
{'ShippingType': 'Flat', 'ShippingServiceOptions': {'ShippingServicePriority': '1', 'ShippingService': self.yaml_config[ \"ShippingService\"],", "r = json.loads(r.text) r = r[0] card_name = r.get('name') eu_card_price", "base_height), PIL.Image.ANTIALIAS) img.save('temp.png') # Upload to PictShare files = {'file':", "= r.get('multiverse_id') # Display card info in CLI print('Name: '", "ebaysdk.exception import ConnectionError from yaml import load from PyQt5.QtWidgets import", "img.save('temp.png') # Upload to PictShare files = {'file': open('temp.png', 'rb')}", "card_set_name = r.get('set_name') card_id = r.get('multiverse_id') # Display card info", "import urllib.parse import PIL import re import configparser import json", "QMessageBox class EbaySeller: def __init__(self): self.api = Trading() config =", "image}, 'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax': '3', 'ConditionID': '1000', 'StartPrice': card_price,", "from ebaysdk.exception import ConnectionError from yaml import load from PyQt5.QtWidgets", "r = requests.post('https://pictshare.net/api/upload.php', files=files) except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\")", "' MTG - NM/M', 'Description': card_name + ' MTG -", "log.txt\") self.msg.setStandardButtons(QMessageBox.Ok) with open('log.txt', 'a+') as log_file: log_file.write(response.reply) else: self.msg.setWindowTitle(\"Upload", "card_set) print('Set name: ' + card_set_name) print('Card ID: ' +", "500 img = Image.open('temp.jpg') height_percent = (base_height / float(img.size[1])) wsize", "= self.get_multiverse_id(name) r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id)) r = json.loads(r.text)", "+ ' MTG - NM/M', 'Description': card_name + ' MTG", "self.yaml_config[ \"ShippingService\"], 'ShippingServiceCost': '1'}}}}) print(response.dict()) print(response.reply) self.msg = QMessageBox() if", "self.msg.setStandardButtons(QMessageBox.Ok) with 
open('log.txt', 'a+') as log_file: log_file.write(response.reply) else: self.msg.setWindowTitle(\"Upload Complete\")", "float(img.size[1])) wsize = int((float(img.size[0]) * float(height_percent))) img = img.resize((wsize, base_height),", "{'ExternalPictureURL': r}) image = image.dict() image = image['SiteHostedPictureDetails']['FullURL'] print(image) #", "Failed, scryfall error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() def get_card_info_and_sell(self, name): try: multiverse_id", "PIL import re import configparser import json from PIL import", "print('Name: ' + card_name) print('Set: ' + card_set) print('Set name:", "print(response.dict()) print(response.reply) self.msg = QMessageBox() if response.reply.Ack == 'Failure': self.msg.setWindowTitle(\"Upload", "+ str(card_id)) self.upload_card(card_name, eu_card_price, us_card_price, card_id) except: self.msg = QMessageBox()", "= r.get('name') eu_card_price = r.get('price_normal') us_card_price = r.get('us_normal') card_set =", "'1000', 'StartPrice': card_price, 'PostalCode': self.yaml_config[\"PostalCode\"], 'Currency': self.yaml_config[\"Currency\"], 'Country': 'GB', 'ListingDuration':", "self.msg.setText(\"Upload Failed, PictShare error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() print(r) r = r.text", "PictShare error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() print(r) r = r.text r =", "open('log.txt', 'a+') as log_file: log_file.write(response.reply) else: self.msg.setWindowTitle(\"Upload Complete\") self.msg.setText(\"Upload Complete,", "card_set = r.get('set_id') card_set_name = r.get('set_name') card_id = r.get('multiverse_id') #", "self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r}) image = image.dict() image = image['SiteHostedPictureDetails']['FullURL'] print(image)", "files = {'file': open('temp.png', 'rb')} try: r = requests.post('https://pictshare.net/api/upload.php', files=files)", "requests import urllib.request import 
urllib.parse import PIL import re import", "self.msg.setWindowTitle(\"Upload Complete\") self.msg.setText(\"Upload Complete, please check your ebay account to", "ConnectionError as e: print(e) print(e.response.dict()) def get_multiverse_id(self, name): try: name", "'Description': card_name + ' MTG - NM/M', 'Quantity': '1', 'PictureDetails':", "card_name) print('Set: ' + card_set) print('Set name: ' + card_set_name)", "r.get('name') eu_card_price = r.get('price_normal') us_card_price = r.get('us_normal') card_set = r.get('set_id')", "base_height = 500 img = Image.open('temp.jpg') height_percent = (base_height /", "urllib.request import urllib.parse import PIL import re import configparser import", "card_name = r.get('name') eu_card_price = r.get('price_normal') us_card_price = r.get('us_normal') card_set", "<reponame>SpironoZeppeli/Magic-The-Scannening import requests import urllib.request import urllib.parse import PIL import", "urllib.request.urlretrieve(card_image, 'temp.jpg') # Resize card base_height = 500 img =", "eu_card_price if card_price < 1: card_price = 1 card_price =", "= r[0] card_name = r.get('name') eu_card_price = r.get('price_normal') us_card_price =", "'&type=card' except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, wizards", "self.yaml_config[\"PostalCode\"], 'Currency': self.yaml_config[\"Currency\"], 'Country': 'GB', 'ListingDuration': 'Days_30', 'PaymentMethods': 'PayPal', 'PayPalEmailAddress':", "requests.get('https://api.scryfall.com/cards/named?exact=' + name) r = json.loads(r.text) return r['multiverse_ids'][0] except: self.msg", "card_id): if us_card_price != 0: card_price = us_card_price * 0.8", "# Resize card base_height = 500 img = Image.open('temp.jpg') height_percent", "try: image = self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r}) image = image.dict() image", "str(card_id)) self.upload_card(card_name, eu_card_price, us_card_price, card_id) except: 
self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload", "re.sub('\\\\.net//', '.net/', r) print(r) try: image = self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r})", "+ card_set_name) print('Card ID: ' + str(card_id)) self.upload_card(card_name, eu_card_price, us_card_price,", "= QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, card name not valid\")", "except ConnectionError as e: print(e) print(e.response.dict()) def get_multiverse_id(self, name): try:", "= eu_card_price if card_price < 1: card_price = 1 card_price", "card_price < 1: card_price = 1 card_price = str(round(card_price, 2))", "ConnectionError from yaml import load from PyQt5.QtWidgets import QMessageBox class", "re.sub('\\\\.net', '.net/', r) r = re.sub('\\\\.net//', '.net/', r) print(r) try:", "Trading() config = configparser.ConfigParser() config.read('config.ini') with open('details.yaml', 'r') as file:", "us_card_price, card_id) except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed,", "config.read('config.ini') with open('details.yaml', 'r') as file: self.yaml_config = load(file) def", "2)) try: card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id + '&type=card' except:", "= int((float(img.size[0]) * float(height_percent))) img = img.resize((wsize, base_height), PIL.Image.ANTIALIAS) img.save('temp.png')", "'DispatchTimeMax': '3', 'ConditionID': '1000', 'StartPrice': card_price, 'PostalCode': self.yaml_config[\"PostalCode\"], 'Currency': self.yaml_config[\"Currency\"],", "+ name) r = json.loads(r.text) return r['multiverse_ids'][0] except: self.msg =", "card_name + ' MTG - NM/M', 'Quantity': '1', 'PictureDetails': {'PictureURL':", "= json.loads(r) print(r) r = r['url'] # Fix using regular", "'Flat', 'ShippingServiceOptions': {'ShippingServicePriority': '1', 'ShippingService': self.yaml_config[ \"ShippingService\"], 'ShippingServiceCost': 
'1'}}}}) print(response.dict())", "= r.get('set_id') card_set_name = r.get('set_name') card_id = r.get('multiverse_id') # Display", "except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, scryfall error\")", "open('temp.png', 'rb')} try: r = requests.post('https://pictshare.net/api/upload.php', files=files) except: self.msg =", "'Failure': self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Complete, please check log.txt\") self.msg.setStandardButtons(QMessageBox.Ok) with", "QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, wizards gatherer error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec()", "'PrimaryCategory': {'CategoryID': '38292'}, 'ShippingDetails': {'ShippingType': 'Flat', 'ShippingServiceOptions': {'ShippingServicePriority': '1', 'ShippingService':", "print(r) r = r['url'] # Fix using regular expression, may", "eu_card_price = r.get('price_normal') us_card_price = r.get('us_normal') card_set = r.get('set_id') card_set_name", "later date r = re.sub('\\\\.net', '.net/', r) r = re.sub('\\\\.net//',", "r['url'] # Fix using regular expression, may not be needed", "us_card_price != 0: card_price = us_card_price * 0.8 else: card_price", "'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax': '3', 'ConditionID': '1000', 'StartPrice': card_price, 'PostalCode':", "return r['multiverse_ids'][0] except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed,", "with open('log.txt', 'a+') as log_file: log_file.write(response.reply) else: self.msg.setWindowTitle(\"Upload Complete\") self.msg.setText(\"Upload", "'.net/', r) print(r) try: image = self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r}) image", "print(image) # Upload to ebay response = self.api.execute('AddFixedPriceItem', { 'Item':", "img = Image.open('temp.jpg') height_percent = 
(base_height / float(img.size[1])) wsize =", "= requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id)) r = json.loads(r.text) r = r[0]", "= 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id + '&type=card' except: self.msg = QMessageBox()", "self.msg = QMessageBox() if response.reply.Ack == 'Failure': self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload", "PIL import Image from ebaysdk.trading import Connection as Trading from", "{'ShippingType': 'Flat', 'ShippingServiceOptions': {'ShippingServicePriority': '1', 'ShippingService': self.yaml_config[ \"ShippingService\"], 'ShippingServiceCost': '1'}}}})", "'Country': 'GB', 'ListingDuration': 'Days_30', 'PaymentMethods': 'PayPal', 'PayPalEmailAddress': self.yaml_config[\"PayPalEmailAddress\"], 'PrimaryCategory': {'CategoryID':", "response.reply.Ack == 'Failure': self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Complete, please check log.txt\")", "yaml import load from PyQt5.QtWidgets import QMessageBox class EbaySeller: def", "be needed at a later date r = re.sub('\\\\.net', '.net/',", "name: ' + card_set_name) print('Card ID: ' + str(card_id)) self.upload_card(card_name,", "height_percent = (base_height / float(img.size[1])) wsize = int((float(img.size[0]) * float(height_percent)))", "open('details.yaml', 'r') as file: self.yaml_config = load(file) def upload_card(self, card_name,", "= re.sub(' ', '%20', name) r = requests.get('https://api.scryfall.com/cards/named?exact=' + name)", "'ListingDuration': 'Days_30', 'PaymentMethods': 'PayPal', 'PayPalEmailAddress': self.yaml_config[\"PayPalEmailAddress\"], 'PrimaryCategory': {'CategoryID': '38292'}, 'ShippingDetails':", "' + str(card_id)) self.upload_card(card_name, eu_card_price, us_card_price, card_id) except: self.msg =", "name): try: name = re.sub(' ', '%20', name) r =", "'ShippingServiceCost': '1'}}}}) print(response.dict()) print(response.reply) self.msg = 
QMessageBox() if response.reply.Ack ==", "re.sub(' ', '%20', name) r = requests.get('https://api.scryfall.com/cards/named?exact=' + name) r", "r.text r = json.loads(r) print(r) r = r['url'] # Fix", "def get_card_info_and_sell(self, name): try: multiverse_id = self.get_multiverse_id(name) r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]='", "except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, card name", "try: name = re.sub(' ', '%20', name) r = requests.get('https://api.scryfall.com/cards/named?exact='", "card_name, eu_card_price, us_card_price, card_id): if us_card_price != 0: card_price =", "import ConnectionError from yaml import load from PyQt5.QtWidgets import QMessageBox", "NM/M', 'Description': card_name + ' MTG - NM/M', 'Quantity': '1',", "name) r = requests.get('https://api.scryfall.com/cards/named?exact=' + name) r = json.loads(r.text) return", "get_multiverse_id(self, name): try: name = re.sub(' ', '%20', name) r", "'ShippingService': self.yaml_config[ \"ShippingService\"], 'ShippingServiceCost': '1'}}}}) print(response.dict()) print(response.reply) self.msg = QMessageBox()", "self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, PictShare error\") self.msg.setStandardButtons(QMessageBox.Ok)", "# Upload to PictShare files = {'file': open('temp.png', 'rb')} try:", "needed at a later date r = re.sub('\\\\.net', '.net/', r)", "self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, card name not", "check log.txt\") self.msg.setStandardButtons(QMessageBox.Ok) with open('log.txt', 'a+') as log_file: log_file.write(response.reply) else:", "# Upload to ebay response = self.api.execute('AddFixedPriceItem', { 'Item': {'Title':", "+ '&type=card' except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed,", "NM/M', 'Quantity': '1', 
'PictureDetails': {'PictureURL': image}, 'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax':", "= image['SiteHostedPictureDetails']['FullURL'] print(image) # Upload to ebay response = self.api.execute('AddFixedPriceItem',", "' + card_set) print('Set name: ' + card_set_name) print('Card ID:", "account to confirm\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() except ConnectionError as e: print(e)", "self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Complete, please check log.txt\") self.msg.setStandardButtons(QMessageBox.Ok) with open('log.txt',", "eu_card_price, us_card_price, card_id) except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload", "card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id + '&type=card' except: self.msg =", "as file: self.yaml_config = load(file) def upload_card(self, card_name, eu_card_price, us_card_price,", "\"ShippingService\"], 'ShippingServiceCost': '1'}}}}) print(response.dict()) print(response.reply) self.msg = QMessageBox() if response.reply.Ack", "= r.get('us_normal') card_set = r.get('set_id') card_set_name = r.get('set_name') card_id =", "= QMessageBox() if response.reply.Ack == 'Failure': self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Complete,", "# Display card info in CLI print('Name: ' + card_name)", "card_price = us_card_price * 0.8 else: card_price = eu_card_price if", "image = image.dict() image = image['SiteHostedPictureDetails']['FullURL'] print(image) # Upload to", "'ConditionID': '1000', 'StartPrice': card_price, 'PostalCode': self.yaml_config[\"PostalCode\"], 'Currency': self.yaml_config[\"Currency\"], 'Country': 'GB',", "self.msg.setText(\"Upload Complete, please check log.txt\") self.msg.setStandardButtons(QMessageBox.Ok) with open('log.txt', 'a+') as", "Failed\") self.msg.setText(\"Upload Failed, scryfall error\") 
self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() def get_card_info_and_sell(self, name):", "+ card_id + '&type=card' except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\")", "import json from PIL import Image from ebaysdk.trading import Connection", "import QMessageBox class EbaySeller: def __init__(self): self.api = Trading() config", "= requests.get('https://api.scryfall.com/cards/named?exact=' + name) r = json.loads(r.text) return r['multiverse_ids'][0] except:", "', '%20', name) r = requests.get('https://api.scryfall.com/cards/named?exact=' + name) r =", "print(e.response.dict()) def get_multiverse_id(self, name): try: name = re.sub(' ', '%20',", "import PIL import re import configparser import json from PIL", "r) r = re.sub('\\\\.net//', '.net/', r) print(r) try: image =", "'%20', name) r = requests.get('https://api.scryfall.com/cards/named?exact=' + name) r = json.loads(r.text)", "self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, card name not valid\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec()", "r['multiverse_ids'][0] except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, scryfall", "card_id + '&type=card' except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload", "int((float(img.size[0]) * float(height_percent))) img = img.resize((wsize, base_height), PIL.Image.ANTIALIAS) img.save('temp.png') #", "self.upload_card(card_name, eu_card_price, us_card_price, card_id) except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\")", "= 1 card_price = str(round(card_price, 2)) try: card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid='", "- NM/M', 'Quantity': '1', 'PictureDetails': {'PictureURL': image}, 'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'},", "QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload 
Failed, scryfall error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() def", "= r['url'] # Fix using regular expression, may not be", "= QMessageBox() self.msg.setWindowTitle(\"Upload Failed\") self.msg.setText(\"Upload Failed, PictShare error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec()", "info in CLI print('Name: ' + card_name) print('Set: ' +", "'ShippingDetails': {'ShippingType': 'Flat', 'ShippingServiceOptions': {'ShippingServicePriority': '1', 'ShippingService': self.yaml_config[ \"ShippingService\"], 'ShippingServiceCost':", "print('Set name: ' + card_set_name) print('Card ID: ' + str(card_id))", "self.msg.exec() def get_card_info_and_sell(self, name): try: multiverse_id = self.get_multiverse_id(name) r =", "1: card_price = 1 card_price = str(round(card_price, 2)) try: card_image", "Failed\") self.msg.setText(\"Upload Failed, wizards gatherer error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() urllib.request.urlretrieve(card_image, 'temp.jpg')", "print(r) r = r.text r = json.loads(r) print(r) r =", "r = json.loads(r.text) return r['multiverse_ids'][0] except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload", "configparser.ConfigParser() config.read('config.ini') with open('details.yaml', 'r') as file: self.yaml_config = load(file)", "= 500 img = Image.open('temp.jpg') height_percent = (base_height / float(img.size[1]))", "= json.loads(r.text) return r['multiverse_ids'][0] except: self.msg = QMessageBox() self.msg.setWindowTitle(\"Upload Failed\")", "self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() def get_card_info_and_sell(self, name): try: multiverse_id = self.get_multiverse_id(name) r", "'PayPalEmailAddress': self.yaml_config[\"PayPalEmailAddress\"], 'PrimaryCategory': {'CategoryID': '38292'}, 'ShippingDetails': {'ShippingType': 'Flat', 'ShippingServiceOptions': {'ShippingServicePriority':", "card base_height = 500 img = Image.open('temp.jpg') height_percent = (base_height", 
"self.api.execute('AddFixedPriceItem', { 'Item': {'Title': card_name + ' MTG - NM/M',", "print(e) print(e.response.dict()) def get_multiverse_id(self, name): try: name = re.sub(' ',", "r[0] card_name = r.get('name') eu_card_price = r.get('price_normal') us_card_price = r.get('us_normal')", "Trading from ebaysdk.exception import ConnectionError from yaml import load from", "r = r['url'] # Fix using regular expression, may not", "to confirm\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() except ConnectionError as e: print(e) print(e.response.dict())", "= json.loads(r.text) r = r[0] card_name = r.get('name') eu_card_price =", "'Currency': self.yaml_config[\"Currency\"], 'Country': 'GB', 'ListingDuration': 'Days_30', 'PaymentMethods': 'PayPal', 'PayPalEmailAddress': self.yaml_config[\"PayPalEmailAddress\"],", "CLI print('Name: ' + card_name) print('Set: ' + card_set) print('Set", "= self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r}) image = image.dict() image = image['SiteHostedPictureDetails']['FullURL']", "check your ebay account to confirm\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() except ConnectionError", "if us_card_price != 0: card_price = us_card_price * 0.8 else:", "* float(height_percent))) img = img.resize((wsize, base_height), PIL.Image.ANTIALIAS) img.save('temp.png') # Upload", "self.msg.exec() print(r) r = r.text r = json.loads(r) print(r) r", "image = image['SiteHostedPictureDetails']['FullURL'] print(image) # Upload to ebay response =", "json from PIL import Image from ebaysdk.trading import Connection as", "response = self.api.execute('AddFixedPriceItem', { 'Item': {'Title': card_name + ' MTG", "Image from ebaysdk.trading import Connection as Trading from ebaysdk.exception import", "as Trading from ebaysdk.exception import ConnectionError from yaml import load", "in CLI print('Name: ' + card_name) print('Set: ' + card_set)", "from PIL import Image from ebaysdk.trading import Connection as Trading", 
"= str(round(card_price, 2)) try: card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id +", "' MTG - NM/M', 'Quantity': '1', 'PictureDetails': {'PictureURL': image}, 'ReturnPolicy':", "image = self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r}) image = image.dict() image =", "us_card_price * 0.8 else: card_price = eu_card_price if card_price <", "regular expression, may not be needed at a later date", "error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() print(r) r = r.text r = json.loads(r)", "r = re.sub('\\\\.net', '.net/', r) r = re.sub('\\\\.net//', '.net/', r)", "< 1: card_price = 1 card_price = str(round(card_price, 2)) try:", "'temp.jpg') # Resize card base_height = 500 img = Image.open('temp.jpg')", "+ str(multiverse_id)) r = json.loads(r.text) r = r[0] card_name =", "Display card info in CLI print('Name: ' + card_name) print('Set:", "card_name + ' MTG - NM/M', 'Description': card_name + '", "from PyQt5.QtWidgets import QMessageBox class EbaySeller: def __init__(self): self.api =", "Failed\") self.msg.setText(\"Upload Failed, PictShare error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() print(r) r =", "wizards gatherer error\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() urllib.request.urlretrieve(card_image, 'temp.jpg') # Resize card", "wsize = int((float(img.size[0]) * float(height_percent))) img = img.resize((wsize, base_height), PIL.Image.ANTIALIAS)", "Complete\") self.msg.setText(\"Upload Complete, please check your ebay account to confirm\")", "{'PictureURL': image}, 'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax': '3', 'ConditionID': '1000', 'StartPrice':", "+ ' MTG - NM/M', 'Quantity': '1', 'PictureDetails': {'PictureURL': image},", "Fix using regular expression, may not be needed at a", "date r = re.sub('\\\\.net', '.net/', r) r = re.sub('\\\\.net//', '.net/',", "Failed, PictShare error\") 
self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() print(r) r = r.text r", "your ebay account to confirm\") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() except ConnectionError as", "r = r.text r = json.loads(r) print(r) r = r['url']", "'1', 'PictureDetails': {'PictureURL': image}, 'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax': '3', 'ConditionID':", "self.msg.exec() except ConnectionError as e: print(e) print(e.response.dict()) def get_multiverse_id(self, name):", "load(file) def upload_card(self, card_name, eu_card_price, us_card_price, card_id): if us_card_price !=", "as e: print(e) print(e.response.dict()) def get_multiverse_id(self, name): try: name =", "'PostalCode': self.yaml_config[\"PostalCode\"], 'Currency': self.yaml_config[\"Currency\"], 'Country': 'GB', 'ListingDuration': 'Days_30', 'PaymentMethods': 'PayPal'," ]
[ "user's GitHub information. ├ issue Command to retrieve issue(s) from", "if repository is None: user = \"gurkult\" else: user, _,", "None: \"\"\"Commands for Github.\"\"\" await ctx.send_help(ctx.command) @github_group.command(name=\"profile\") @commands.cooldown(1, 10, BucketType.user)", "├ issue Command to retrieve issue(s) from a GitHub repository.", "Repository\") embed.add_field(name=\"Repository\", value=f\"[Go to GitHub]({BOT_REPO_URL})\") embed.set_thumbnail(url=self.bot.user.avatar_url) await ctx.send(embed=embed) return elif", "_profile.GithubInfo(self.bot.http_session) embed = await github_profile.get_github_info(username) await ctx.send(embed=embed) @github_group.command(name=\"issue\", aliases=(\"pr\",)) async", "optional and sends the help command if not specified. \"\"\"", "retrieve issue(s) from a GitHub repository. └ source Displays information", "a GitHub repository. └ source Displays information about the bot's", "*, source_item: typing.Optional[str] = None ) -> None: \"\"\"Displays information", "elif not ctx.bot.get_command(source_item): raise commands.BadArgument( f\"Unable to convert `{source_item}` to", "\"\"\"Commands for Github.\"\"\" await ctx.send_help(ctx.command) @github_group.command(name=\"profile\") @commands.cooldown(1, 10, BucketType.user) async", "repository. └ source Displays information about the bot's source code.", "invoke_without_command=True) async def github_group(self, ctx: commands.Context) -> None: \"\"\"Commands for", "= \"gurkult\" else: user, _, repository = repository.rpartition(\"/\") if user", "import _issues, _profile, _source class Github(commands.Cog): \"\"\" Github Category cog,", "None: \"\"\"Command to retrieve issue(s) from a GitHub repository.\"\"\" github_issue", "contains commands related to github. Commands: ├ profile Fetches a", "ctx.send(embed=embed) def setup(bot: commands.Bot) -> None: \"\"\"Load the Github cog.\"\"\"", "to retrieve issue(s) from a GitHub repository. 
└ source Displays", "typing from bot.constants import BOT_REPO_URL from discord import Embed from", "@github_group.command(name=\"source\", aliases=(\"src\", \"inspect\")) async def source_command( self, ctx: commands.Context, *,", "issue(s) from a GitHub repository.\"\"\" github_issue = _issues.Issues(self.bot.http_session) if not", "from bot.constants import BOT_REPO_URL from discord import Embed from discord.ext", "convert `{source_item}` to valid command or Cog.\" ) github_source =", "Fetches a user's GitHub information. ├ issue Command to retrieve", ") -> None: \"\"\"Command to retrieve issue(s) from a GitHub", "= await github_issue.issue(ctx.message.channel, numbers, repository, user) await ctx.send(embed=embed) @github_group.command(name=\"source\", aliases=(\"src\",", "which contains commands related to github. Commands: ├ profile Fetches", "commands related to github. Commands: ├ profile Fetches a user's", "-> None: \"\"\"Displays information about the bot's source code.\"\"\" if", "bot @commands.group(name=\"github\", aliases=(\"gh\",), invoke_without_command=True) async def github_group(self, ctx: commands.Context) ->", "GitHub information. 
├ issue Command to retrieve issue(s) from a", "discord import Embed from discord.ext import commands from discord.ext.commands.cooldowns import", "commands.Bot) -> None: self.bot = bot @commands.group(name=\"github\", aliases=(\"gh\",), invoke_without_command=True) async", "is optional and sends the help command if not specified.", "raise commands.BadArgument( f\"Unable to convert `{source_item}` to valid command or", "repository.rpartition(\"/\") if user == \"\": user = \"gurkult\" embed =", "-> None: \"\"\"Command to retrieve issue(s) from a GitHub repository.\"\"\"", "def github_group(self, ctx: commands.Context) -> None: \"\"\"Commands for Github.\"\"\" await", "embed.add_field(name=\"Repository\", value=f\"[Go to GitHub]({BOT_REPO_URL})\") embed.set_thumbnail(url=self.bot.user.avatar_url) await ctx.send(embed=embed) return elif not", "user's GitHub information. Username is optional and sends the help", "\"\"\" def __init__(self, bot: commands.Bot) -> None: self.bot = bot", "commands.Context, username: str) -> None: \"\"\" Fetches a user's GitHub", "from a GitHub repository.\"\"\" github_issue = _issues.Issues(self.bot.http_session) if not numbers:", "source_item is None: embed = Embed(title=\"Gurkbot's GitHub Repository\") embed.add_field(name=\"Repository\", value=f\"[Go", "not ctx.bot.get_command(source_item): raise commands.BadArgument( f\"Unable to convert `{source_item}` to valid", "GitHub repository. └ source Displays information about the bot's source", "None: self.bot = bot @commands.group(name=\"github\", aliases=(\"gh\",), invoke_without_command=True) async def github_group(self,", "the help command if not specified. 
\"\"\" github_profile = _profile.GithubInfo(self.bot.http_session)", "github_profile = _profile.GithubInfo(self.bot.http_session) embed = await github_profile.get_github_info(username) await ctx.send(embed=embed) @github_group.command(name=\"issue\",", "issue( self, ctx: commands.Context, numbers: commands.Greedy[int], repository: typing.Optional[str] = None,", "if not numbers: raise commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"]) if repository is None: user", "if user == \"\": user = \"gurkult\" embed = await", "Github Category cog, which contains commands related to github. Commands:", ". import _issues, _profile, _source class Github(commands.Cog): \"\"\" Github Category", "= await github_profile.get_github_info(username) await ctx.send(embed=embed) @github_group.command(name=\"issue\", aliases=(\"pr\",)) async def issue(", "issue Command to retrieve issue(s) from a GitHub repository. └", "BOT_REPO_URL from discord import Embed from discord.ext import commands from", "related to github. Commands: ├ profile Fetches a user's GitHub", "\"\"\" github_profile = _profile.GithubInfo(self.bot.http_session) embed = await github_profile.get_github_info(username) await ctx.send(embed=embed)", "str) -> None: \"\"\" Fetches a user's GitHub information. Username", "Embed(title=\"Gurkbot's GitHub Repository\") embed.add_field(name=\"Repository\", value=f\"[Go to GitHub]({BOT_REPO_URL})\") embed.set_thumbnail(url=self.bot.user.avatar_url) await ctx.send(embed=embed)", "about the bot's source code.\"\"\" if source_item is None: embed", "BucketType from . import _issues, _profile, _source class Github(commands.Cog): \"\"\"", "from a GitHub repository. └ source Displays information about the", "aliases=(\"gh\",), invoke_without_command=True) async def github_group(self, ctx: commands.Context) -> None: \"\"\"Commands", "def profile(self, ctx: commands.Context, username: str) -> None: \"\"\" Fetches", "if not specified. 
\"\"\" github_profile = _profile.GithubInfo(self.bot.http_session) embed = await", "@commands.cooldown(1, 10, BucketType.user) async def profile(self, ctx: commands.Context, username: str)", "profile(self, ctx: commands.Context, username: str) -> None: \"\"\" Fetches a", "user = \"gurkult\" embed = await github_issue.issue(ctx.message.channel, numbers, repository, user)", "embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item)) await ctx.send(embed=embed) def setup(bot: commands.Bot) ->", "commands.Greedy[int], repository: typing.Optional[str] = None, ) -> None: \"\"\"Command to", "not numbers: raise commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"]) if repository is None: user =", "__init__(self, bot: commands.Bot) -> None: self.bot = bot @commands.group(name=\"github\", aliases=(\"gh\",),", "\"inspect\")) async def source_command( self, ctx: commands.Context, *, source_item: typing.Optional[str]", "= None ) -> None: \"\"\"Displays information about the bot's", "BucketType.user) async def profile(self, ctx: commands.Context, username: str) -> None:", "await github_profile.get_github_info(username) await ctx.send(embed=embed) @github_group.command(name=\"issue\", aliases=(\"pr\",)) async def issue( self,", "a user's GitHub information. ├ issue Command to retrieve issue(s)", "issue(s) from a GitHub repository. 
└ source Displays information about", "await ctx.send_help(ctx.command) @github_group.command(name=\"profile\") @commands.cooldown(1, 10, BucketType.user) async def profile(self, ctx:", "github_issue.issue(ctx.message.channel, numbers, repository, user) await ctx.send(embed=embed) @github_group.command(name=\"source\", aliases=(\"src\", \"inspect\")) async", "source_command( self, ctx: commands.Context, *, source_item: typing.Optional[str] = None )", "async def profile(self, ctx: commands.Context, username: str) -> None: \"\"\"", "= _profile.GithubInfo(self.bot.http_session) embed = await github_profile.get_github_info(username) await ctx.send(embed=embed) @github_group.command(name=\"issue\", aliases=(\"pr\",))", "github_issue = _issues.Issues(self.bot.http_session) if not numbers: raise commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"]) if repository", "async def issue( self, ctx: commands.Context, numbers: commands.Greedy[int], repository: typing.Optional[str]", "await ctx.send(embed=embed) return elif not ctx.bot.get_command(source_item): raise commands.BadArgument( f\"Unable to", "await ctx.send(embed=embed) @github_group.command(name=\"issue\", aliases=(\"pr\",)) async def issue( self, ctx: commands.Context,", "a GitHub repository.\"\"\" github_issue = _issues.Issues(self.bot.http_session) if not numbers: raise", "repository: typing.Optional[str] = None, ) -> None: \"\"\"Command to retrieve", "numbers, repository, user) await ctx.send(embed=embed) @github_group.command(name=\"source\", aliases=(\"src\", \"inspect\")) async def", "source_item: typing.Optional[str] = None ) -> None: \"\"\"Displays information about", "discord.ext.commands.cooldowns import BucketType from . 
import _issues, _profile, _source class", "@github_group.command(name=\"issue\", aliases=(\"pr\",)) async def issue( self, ctx: commands.Context, numbers: commands.Greedy[int],", "from discord import Embed from discord.ext import commands from discord.ext.commands.cooldowns", "a user's GitHub information. Username is optional and sends the", "user == \"\": user = \"gurkult\" embed = await github_issue.issue(ctx.message.channel,", "class Github(commands.Cog): \"\"\" Github Category cog, which contains commands related", "from . import _issues, _profile, _source class Github(commands.Cog): \"\"\" Github", "await github_issue.issue(ctx.message.channel, numbers, repository, user) await ctx.send(embed=embed) @github_group.command(name=\"source\", aliases=(\"src\", \"inspect\"))", "github. Commands: ├ profile Fetches a user's GitHub information. ├", "information about the bot's source code.\"\"\" if source_item is None:", "None: embed = Embed(title=\"Gurkbot's GitHub Repository\") embed.add_field(name=\"Repository\", value=f\"[Go to GitHub]({BOT_REPO_URL})\")", "Commands: ├ profile Fetches a user's GitHub information. ├ issue", "= Embed(title=\"Gurkbot's GitHub Repository\") embed.add_field(name=\"Repository\", value=f\"[Go to GitHub]({BOT_REPO_URL})\") embed.set_thumbnail(url=self.bot.user.avatar_url) await", "profile Fetches a user's GitHub information. 
├ issue Command to", "to valid command or Cog.\" ) github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url)", "code.\"\"\" if source_item is None: embed = Embed(title=\"Gurkbot's GitHub Repository\")", "GitHub repository.\"\"\" github_issue = _issues.Issues(self.bot.http_session) if not numbers: raise commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"])", "raise commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"]) if repository is None: user = \"gurkult\" else:", "username: str) -> None: \"\"\" Fetches a user's GitHub information.", "repository, user) await ctx.send(embed=embed) @github_group.command(name=\"source\", aliases=(\"src\", \"inspect\")) async def source_command(", "import Embed from discord.ext import commands from discord.ext.commands.cooldowns import BucketType", "is None: user = \"gurkult\" else: user, _, repository =", "else: user, _, repository = repository.rpartition(\"/\") if user == \"\":", "= _issues.Issues(self.bot.http_session) if not numbers: raise commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"]) if repository is", "== \"\": user = \"gurkult\" embed = await github_issue.issue(ctx.message.channel, numbers,", "aliases=(\"src\", \"inspect\")) async def source_command( self, ctx: commands.Context, *, source_item:", "└ source Displays information about the bot's source code. \"\"\"", "to GitHub]({BOT_REPO_URL})\") embed.set_thumbnail(url=self.bot.user.avatar_url) await ctx.send(embed=embed) return elif not ctx.bot.get_command(source_item): raise", "command or Cog.\" ) github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url) embed =", "\"\"\" Fetches a user's GitHub information. Username is optional and", "ctx.send(embed=embed) @github_group.command(name=\"source\", aliases=(\"src\", \"inspect\")) async def source_command( self, ctx: commands.Context,", "information. 
Username is optional and sends the help command if", "bot.constants import BOT_REPO_URL from discord import Embed from discord.ext import", "None ) -> None: \"\"\"Displays information about the bot's source", ") github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url) embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item)) await", "about the bot's source code. \"\"\" def __init__(self, bot: commands.Bot)", "await ctx.send(embed=embed) @github_group.command(name=\"source\", aliases=(\"src\", \"inspect\")) async def source_command( self, ctx:", "self, ctx: commands.Context, *, source_item: typing.Optional[str] = None ) ->", "-> None: self.bot = bot @commands.group(name=\"github\", aliases=(\"gh\",), invoke_without_command=True) async def", "numbers: raise commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"]) if repository is None: user = \"gurkult\"", "= bot @commands.group(name=\"github\", aliases=(\"gh\",), invoke_without_command=True) async def github_group(self, ctx: commands.Context)", "Github.\"\"\" await ctx.send_help(ctx.command) @github_group.command(name=\"profile\") @commands.cooldown(1, 10, BucketType.user) async def profile(self,", "commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"]) if repository is None: user = \"gurkult\" else: user,", "_issues, _profile, _source class Github(commands.Cog): \"\"\" Github Category cog, which", "= \"gurkult\" embed = await github_issue.issue(ctx.message.channel, numbers, repository, user) await", "ctx.bot.get_command(source_item): raise commands.BadArgument( f\"Unable to convert `{source_item}` to valid command", "source code. 
\"\"\" def __init__(self, bot: commands.Bot) -> None: self.bot", "= await github_source.inspect(cmd=ctx.bot.get_command(source_item)) await ctx.send(embed=embed) def setup(bot: commands.Bot) -> None:", "repository.\"\"\" github_issue = _issues.Issues(self.bot.http_session) if not numbers: raise commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"]) if", "not specified. \"\"\" github_profile = _profile.GithubInfo(self.bot.http_session) embed = await github_profile.get_github_info(username)", "Command to retrieve issue(s) from a GitHub repository. └ source", "GitHub information. Username is optional and sends the help command", "help command if not specified. \"\"\" github_profile = _profile.GithubInfo(self.bot.http_session) embed", "_issues.Issues(self.bot.http_session) if not numbers: raise commands.MissingRequiredArgument(ctx.command.clean_params[\"numbers\"]) if repository is None:", "\"\": user = \"gurkult\" embed = await github_issue.issue(ctx.message.channel, numbers, repository,", "cog, which contains commands related to github. Commands: ├ profile", "Category cog, which contains commands related to github. Commands: ├", "user = \"gurkult\" else: user, _, repository = repository.rpartition(\"/\") if", "ctx: commands.Context, *, source_item: typing.Optional[str] = None ) -> None:", ") -> None: \"\"\"Displays information about the bot's source code.\"\"\"", "\"gurkult\" embed = await github_issue.issue(ctx.message.channel, numbers, repository, user) await ctx.send(embed=embed)", "Displays information about the bot's source code. 
\"\"\" def __init__(self,", "@github_group.command(name=\"profile\") @commands.cooldown(1, 10, BucketType.user) async def profile(self, ctx: commands.Context, username:", "repository is None: user = \"gurkult\" else: user, _, repository", "Cog.\" ) github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url) embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item))", "valid command or Cog.\" ) github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url) embed", "ctx: commands.Context) -> None: \"\"\"Commands for Github.\"\"\" await ctx.send_help(ctx.command) @github_group.command(name=\"profile\")", "\"\"\"Displays information about the bot's source code.\"\"\" if source_item is", "discord.ext import commands from discord.ext.commands.cooldowns import BucketType from . import", "aliases=(\"pr\",)) async def issue( self, ctx: commands.Context, numbers: commands.Greedy[int], repository:", "value=f\"[Go to GitHub]({BOT_REPO_URL})\") embed.set_thumbnail(url=self.bot.user.avatar_url) await ctx.send(embed=embed) return elif not ctx.bot.get_command(source_item):", "-> None: \"\"\"Commands for Github.\"\"\" await ctx.send_help(ctx.command) @github_group.command(name=\"profile\") @commands.cooldown(1, 10,", "Username is optional and sends the help command if not", "embed.set_thumbnail(url=self.bot.user.avatar_url) await ctx.send(embed=embed) return elif not ctx.bot.get_command(source_item): raise commands.BadArgument( f\"Unable", "Github(commands.Cog): \"\"\" Github Category cog, which contains commands related to", "await ctx.send(embed=embed) def setup(bot: commands.Bot) -> None: \"\"\"Load the Github", "_, repository = repository.rpartition(\"/\") if user == \"\": user =", "\"gurkult\" else: user, _, repository = repository.rpartition(\"/\") if user ==", "to github. 
Commands: ├ profile Fetches a user's GitHub information.", "commands.Context, *, source_item: typing.Optional[str] = None ) -> None: \"\"\"Displays", "if source_item is None: embed = Embed(title=\"Gurkbot's GitHub Repository\") embed.add_field(name=\"Repository\",", "numbers: commands.Greedy[int], repository: typing.Optional[str] = None, ) -> None: \"\"\"Command", "@commands.group(name=\"github\", aliases=(\"gh\",), invoke_without_command=True) async def github_group(self, ctx: commands.Context) -> None:", "commands.Context) -> None: \"\"\"Commands for Github.\"\"\" await ctx.send_help(ctx.command) @github_group.command(name=\"profile\") @commands.cooldown(1,", "_profile, _source class Github(commands.Cog): \"\"\" Github Category cog, which contains", "from discord.ext.commands.cooldowns import BucketType from . import _issues, _profile, _source", "sends the help command if not specified. \"\"\" github_profile =", "= repository.rpartition(\"/\") if user == \"\": user = \"gurkult\" embed", "None, ) -> None: \"\"\"Command to retrieve issue(s) from a", "typing.Optional[str] = None ) -> None: \"\"\"Displays information about the", "self.bot.user.avatar_url) embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item)) await ctx.send(embed=embed) def setup(bot: commands.Bot)", "10, BucketType.user) async def profile(self, ctx: commands.Context, username: str) ->", "import BucketType from . 
import _issues, _profile, _source class Github(commands.Cog):", "def setup(bot: commands.Bot) -> None: \"\"\"Load the Github cog.\"\"\" bot.add_cog(Github(bot))", "user) await ctx.send(embed=embed) @github_group.command(name=\"source\", aliases=(\"src\", \"inspect\")) async def source_command( self,", "ctx: commands.Context, username: str) -> None: \"\"\" Fetches a user's", "from discord.ext import commands from discord.ext.commands.cooldowns import BucketType from .", "source code.\"\"\" if source_item is None: embed = Embed(title=\"Gurkbot's GitHub", "\"\"\" Github Category cog, which contains commands related to github.", "for Github.\"\"\" await ctx.send_help(ctx.command) @github_group.command(name=\"profile\") @commands.cooldown(1, 10, BucketType.user) async def", "and sends the help command if not specified. \"\"\" github_profile", "commands from discord.ext.commands.cooldowns import BucketType from . import _issues, _profile,", "\"\"\"Command to retrieve issue(s) from a GitHub repository.\"\"\" github_issue =", "Embed from discord.ext import commands from discord.ext.commands.cooldowns import BucketType from", "bot: commands.Bot) -> None: self.bot = bot @commands.group(name=\"github\", aliases=(\"gh\",), invoke_without_command=True)", "GitHub Repository\") embed.add_field(name=\"Repository\", value=f\"[Go to GitHub]({BOT_REPO_URL})\") embed.set_thumbnail(url=self.bot.user.avatar_url) await ctx.send(embed=embed) return", "bot's source code. \"\"\" def __init__(self, bot: commands.Bot) -> None:", "information about the bot's source code. \"\"\" def __init__(self, bot:", "import BOT_REPO_URL from discord import Embed from discord.ext import commands", "information. 
├ issue Command to retrieve issue(s) from a GitHub", "self, ctx: commands.Context, numbers: commands.Greedy[int], repository: typing.Optional[str] = None, )", "github_source.inspect(cmd=ctx.bot.get_command(source_item)) await ctx.send(embed=embed) def setup(bot: commands.Bot) -> None: \"\"\"Load the", "None: \"\"\" Fetches a user's GitHub information. Username is optional", "embed = await github_profile.get_github_info(username) await ctx.send(embed=embed) @github_group.command(name=\"issue\", aliases=(\"pr\",)) async def", "await github_source.inspect(cmd=ctx.bot.get_command(source_item)) await ctx.send(embed=embed) def setup(bot: commands.Bot) -> None: \"\"\"Load", "_source.Source(self.bot.http_session, self.bot.user.avatar_url) embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item)) await ctx.send(embed=embed) def setup(bot:", "None: user = \"gurkult\" else: user, _, repository = repository.rpartition(\"/\")", "typing.Optional[str] = None, ) -> None: \"\"\"Command to retrieve issue(s)", "specified. \"\"\" github_profile = _profile.GithubInfo(self.bot.http_session) embed = await github_profile.get_github_info(username) await", "import commands from discord.ext.commands.cooldowns import BucketType from . import _issues,", "to retrieve issue(s) from a GitHub repository.\"\"\" github_issue = _issues.Issues(self.bot.http_session)", "embed = await github_issue.issue(ctx.message.channel, numbers, repository, user) await ctx.send(embed=embed) @github_group.command(name=\"source\",", "the bot's source code.\"\"\" if source_item is None: embed =", "None: \"\"\"Displays information about the bot's source code.\"\"\" if source_item", "is None: embed = Embed(title=\"Gurkbot's GitHub Repository\") embed.add_field(name=\"Repository\", value=f\"[Go to", "ctx.send(embed=embed) return elif not ctx.bot.get_command(source_item): raise commands.BadArgument( f\"Unable to convert", "code. 
\"\"\" def __init__(self, bot: commands.Bot) -> None: self.bot =", "github_profile.get_github_info(username) await ctx.send(embed=embed) @github_group.command(name=\"issue\", aliases=(\"pr\",)) async def issue( self, ctx:", "self.bot = bot @commands.group(name=\"github\", aliases=(\"gh\",), invoke_without_command=True) async def github_group(self, ctx:", "bot's source code.\"\"\" if source_item is None: embed = Embed(title=\"Gurkbot's", "commands.BadArgument( f\"Unable to convert `{source_item}` to valid command or Cog.\"", "retrieve issue(s) from a GitHub repository.\"\"\" github_issue = _issues.Issues(self.bot.http_session) if", "async def github_group(self, ctx: commands.Context) -> None: \"\"\"Commands for Github.\"\"\"", "ctx: commands.Context, numbers: commands.Greedy[int], repository: typing.Optional[str] = None, ) ->", "embed = Embed(title=\"Gurkbot's GitHub Repository\") embed.add_field(name=\"Repository\", value=f\"[Go to GitHub]({BOT_REPO_URL})\") embed.set_thumbnail(url=self.bot.user.avatar_url)", "├ profile Fetches a user's GitHub information. ├ issue Command", "= None, ) -> None: \"\"\"Command to retrieve issue(s) from", "the bot's source code. \"\"\" def __init__(self, bot: commands.Bot) ->", "command if not specified. 
\"\"\" github_profile = _profile.GithubInfo(self.bot.http_session) embed =", "GitHub]({BOT_REPO_URL})\") embed.set_thumbnail(url=self.bot.user.avatar_url) await ctx.send(embed=embed) return elif not ctx.bot.get_command(source_item): raise commands.BadArgument(", "to convert `{source_item}` to valid command or Cog.\" ) github_source", "`{source_item}` to valid command or Cog.\" ) github_source = _source.Source(self.bot.http_session,", "_source class Github(commands.Cog): \"\"\" Github Category cog, which contains commands", "or Cog.\" ) github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url) embed = await", "github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url) embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item)) await ctx.send(embed=embed)", "def issue( self, ctx: commands.Context, numbers: commands.Greedy[int], repository: typing.Optional[str] =", "-> None: \"\"\" Fetches a user's GitHub information. Username is", "Fetches a user's GitHub information. 
Username is optional and sends", "repository = repository.rpartition(\"/\") if user == \"\": user = \"gurkult\"", "async def source_command( self, ctx: commands.Context, *, source_item: typing.Optional[str] =", "ctx.send(embed=embed) @github_group.command(name=\"issue\", aliases=(\"pr\",)) async def issue( self, ctx: commands.Context, numbers:", "import typing from bot.constants import BOT_REPO_URL from discord import Embed", "def __init__(self, bot: commands.Bot) -> None: self.bot = bot @commands.group(name=\"github\",", "def source_command( self, ctx: commands.Context, *, source_item: typing.Optional[str] = None", "f\"Unable to convert `{source_item}` to valid command or Cog.\" )", "commands.Context, numbers: commands.Greedy[int], repository: typing.Optional[str] = None, ) -> None:", "= _source.Source(self.bot.http_session, self.bot.user.avatar_url) embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item)) await ctx.send(embed=embed) def", "github_group(self, ctx: commands.Context) -> None: \"\"\"Commands for Github.\"\"\" await ctx.send_help(ctx.command)", "return elif not ctx.bot.get_command(source_item): raise commands.BadArgument( f\"Unable to convert `{source_item}`", "source Displays information about the bot's source code. \"\"\" def", "ctx.send_help(ctx.command) @github_group.command(name=\"profile\") @commands.cooldown(1, 10, BucketType.user) async def profile(self, ctx: commands.Context,", "user, _, repository = repository.rpartition(\"/\") if user == \"\": user" ]
[ "**k): if '_the_instance' not in cls.__dict__: cls._the_instance = object.__new__(cls) return", "def __init__(self): self.__slack_client = SlackClient(SLACK_API_KEY) def send_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG,", "cls._the_instance def __init__(self): self.__slack_client = SlackClient(SLACK_API_KEY) def send_msg_to(self, text='', channel=DEFAULT_CHANNEL):", "channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, mrkdwn=True, channel=channel, text=text) if __name__ == '__main__':", "channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, channel=channel, text=text) def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG,", "send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, mrkdwn=True, channel=channel, text=text) if __name__", "class SlackBot: API_CHAT_MSG = 'chat.postMessage' BOT_NAME = 'News Bot' DEFAULT_CHANNEL", "object.__new__(cls) return cls._the_instance def __init__(self): self.__slack_client = SlackClient(SLACK_API_KEY) def send_msg_to(self,", "self.__slack_client = SlackClient(SLACK_API_KEY) def send_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, channel=channel,", "= SlackClient(SLACK_API_KEY) def send_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, channel=channel, text=text)", "slackclient import SlackClient from external import SLACK_API_KEY class SlackBot: API_CHAT_MSG", "cls._the_instance = object.__new__(cls) return cls._the_instance def __init__(self): self.__slack_client = SlackClient(SLACK_API_KEY)", "API_CHAT_MSG = 'chat.postMessage' BOT_NAME = 'News Bot' DEFAULT_CHANNEL 
= 'news_notification'", "send_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, channel=channel, text=text) def send_formatted_msg_to(self, text='',", "'News Bot' DEFAULT_CHANNEL = 'news_notification' def __new__(cls, *p, **k): if", "'news_notification' def __new__(cls, *p, **k): if '_the_instance' not in cls.__dict__:", "text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, channel=channel, text=text) def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL):", "SlackClient from external import SLACK_API_KEY class SlackBot: API_CHAT_MSG = 'chat.postMessage'", "self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, channel=channel, text=text) def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME,", "from external import SLACK_API_KEY class SlackBot: API_CHAT_MSG = 'chat.postMessage' BOT_NAME", "<gh_stars>0 from slackclient import SlackClient from external import SLACK_API_KEY class", "return cls._the_instance def __init__(self): self.__slack_client = SlackClient(SLACK_API_KEY) def send_msg_to(self, text='',", "text=text) def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, mrkdwn=True, channel=channel, text=text)", "SlackBot: API_CHAT_MSG = 'chat.postMessage' BOT_NAME = 'News Bot' DEFAULT_CHANNEL =", "*p, **k): if '_the_instance' not in cls.__dict__: cls._the_instance = object.__new__(cls)", "'_the_instance' not in cls.__dict__: cls._the_instance = object.__new__(cls) return cls._the_instance def", "DEFAULT_CHANNEL = 'news_notification' def __new__(cls, *p, **k): if '_the_instance' not", "external import SLACK_API_KEY class SlackBot: API_CHAT_MSG = 'chat.postMessage' BOT_NAME =", "in cls.__dict__: 
cls._the_instance = object.__new__(cls) return cls._the_instance def __init__(self): self.__slack_client", "BOT_NAME = 'News Bot' DEFAULT_CHANNEL = 'news_notification' def __new__(cls, *p,", "= object.__new__(cls) return cls._the_instance def __init__(self): self.__slack_client = SlackClient(SLACK_API_KEY) def", "SLACK_API_KEY class SlackBot: API_CHAT_MSG = 'chat.postMessage' BOT_NAME = 'News Bot'", "def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, mrkdwn=True, channel=channel, text=text) if", "__new__(cls, *p, **k): if '_the_instance' not in cls.__dict__: cls._the_instance =", "= 'news_notification' def __new__(cls, *p, **k): if '_the_instance' not in", "'chat.postMessage' BOT_NAME = 'News Bot' DEFAULT_CHANNEL = 'news_notification' def __new__(cls,", "channel=channel, text=text) def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, mrkdwn=True, channel=channel,", "text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, mrkdwn=True, channel=channel, text=text) if __name__ ==", "def __new__(cls, *p, **k): if '_the_instance' not in cls.__dict__: cls._the_instance", "__init__(self): self.__slack_client = SlackClient(SLACK_API_KEY) def send_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME,", "def send_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, channel=channel, text=text) def send_formatted_msg_to(self,", "not in cls.__dict__: cls._the_instance = object.__new__(cls) return cls._the_instance def __init__(self):", "import SLACK_API_KEY class SlackBot: API_CHAT_MSG = 'chat.postMessage' BOT_NAME = 'News", "import SlackClient from external import SLACK_API_KEY class 
SlackBot: API_CHAT_MSG =", "Bot' DEFAULT_CHANNEL = 'news_notification' def __new__(cls, *p, **k): if '_the_instance'", "= 'chat.postMessage' BOT_NAME = 'News Bot' DEFAULT_CHANNEL = 'news_notification' def", "username=SlackBot.BOT_NAME, channel=channel, text=text) def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, mrkdwn=True,", "username=SlackBot.BOT_NAME, mrkdwn=True, channel=channel, text=text) if __name__ == '__main__': SlackBot().send_msg_to('hello world!!')", "self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, mrkdwn=True, channel=channel, text=text) if __name__ == '__main__': SlackBot().send_msg_to('hello", "cls.__dict__: cls._the_instance = object.__new__(cls) return cls._the_instance def __init__(self): self.__slack_client =", "if '_the_instance' not in cls.__dict__: cls._the_instance = object.__new__(cls) return cls._the_instance", "from slackclient import SlackClient from external import SLACK_API_KEY class SlackBot:", "SlackClient(SLACK_API_KEY) def send_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, channel=channel, text=text) def", "= 'News Bot' DEFAULT_CHANNEL = 'news_notification' def __new__(cls, *p, **k):" ]
[ "forge_priority(priority: int) -> bytes: return priority.to_bytes(2, 'big') def forge_content(content: Dict[str,", "forge_block_header(shell_header: Dict[str, Any]) -> bytes: res = forge_int_fixed(shell_header['level'], 4) res", "+= b'\\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\\x00' return res def forge_block_header(shell_header:", "4) res += forge_int_fixed(shell_header['proto'], 1) res += forge_base58(shell_header['predecessor']) res +=", "'big') + 1 return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex() def forge_int_fixed(value:", "== 'activate': return b'\\x00' raise NotImplementedError(command) def forge_fitness(fitness: List[str]) ->", "Dict[str, Any]) -> bytes: res = forge_int_fixed(shell_header['level'], 4) res +=", "def forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes: res = b'' if", "0 minor = 1 else: major = int.from_bytes(bytes.fromhex(fitness[0]), 'big') minor", "'activate': return b'\\x00' raise NotImplementedError(command) def forge_fitness(fitness: List[str]) -> bytes:", "bytes: res = b'' if protocol_data.get('content'): res += forge_content(protocol_data['content']) else:", "+= forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8) res += forge_int_fixed(shell_header['validation_pass'], 1) res += forge_base58(shell_header['operations_hash'])", "= b'' if protocol_data.get('content'): res += forge_content(protocol_data['content']) else: res +=", "b'\\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\\x00' return res def forge_block_header(shell_header: Dict[str,", "= int.from_bytes(bytes.fromhex(fitness[0]), 'big') minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1 return", "int.from_bytes(bytes.fromhex(fitness[0]), 'big') minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1 return major.to_bytes(1,", "forge_fitness(fitness: List[str]) -> bytes: return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness))) def", "Dict[str, Any]) -> bytes: 
res = b'' res += forge_command(content['command'])", "res += b'\\xFF' res += forge_base58(protocol_data['seed_nonce_hash']) else: res += b'\\x00'", "from typing import Any, Dict, List, Tuple from pytezos.michelson.forge import", "forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness))) def forge_priority(priority: int) -> bytes: return", "b'\\xFF' res += forge_base58(protocol_data['seed_nonce_hash']) else: res += b'\\x00' res +=", "-> bytes: return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness))) def forge_priority(priority: int)", "1) res += forge_base58(shell_header['predecessor']) res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8) res +=", "0: major = 0 minor = 1 else: major =", "bump_fitness(fitness: Tuple[str, str]) -> Tuple[str, str]: if len(fitness) == 0:", "+ 1 return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex() def forge_int_fixed(value: int,", "bytes: if command == 'activate': return b'\\x00' raise NotImplementedError(command) def", "return value.to_bytes(length, 'big') def forge_command(command: str) -> bytes: if command", "def forge_priority(priority: int) -> bytes: return priority.to_bytes(2, 'big') def forge_content(content:", "'big').hex(), minor.to_bytes(8, 'big').hex() def forge_int_fixed(value: int, length: int) -> bytes:", "forge_int_fixed(shell_header['proto'], 1) res += forge_base58(shell_header['predecessor']) res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8) res", "major = 0 minor = 1 else: major = int.from_bytes(bytes.fromhex(fitness[0]),", "res += b'\\x00' res += b'\\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\\x00'", "res = forge_int_fixed(shell_header['level'], 4) res += forge_int_fixed(shell_header['proto'], 1) res +=", "== 0: major = 0 minor = 1 else: major", "return b'\\x00' raise NotImplementedError(command) def forge_fitness(fitness: List[str]) -> bytes: return", "Tuple[str, str]) -> Tuple[str, str]: if 
len(fitness) == 0: major", "forge_priority(protocol_data['priority']) res += bytes.fromhex(protocol_data['proof_of_work_nonce']) if protocol_data.get('seed_nonce_hash'): res += b'\\xFF' res", "return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness))) def forge_priority(priority: int) -> bytes:", "forge_int_fixed(value: int, length: int) -> bytes: return value.to_bytes(length, 'big') def", "res += forge_base58(shell_header['predecessor']) res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8) res += forge_int_fixed(shell_header['validation_pass'],", "List[str]) -> bytes: return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness))) def forge_priority(priority:", "str]) -> Tuple[str, str]: if len(fitness) == 0: major =", "len(fitness) == 0: major = 0 minor = 1 else:", "int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1 return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex() def", "from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp def bump_fitness(fitness: Tuple[str, str])", "major = int.from_bytes(bytes.fromhex(fitness[0]), 'big') minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1", "length: int) -> bytes: return value.to_bytes(length, 'big') def forge_command(command: str)", "-> bytes: return priority.to_bytes(2, 'big') def forge_content(content: Dict[str, Any]) ->", "res += bytes.fromhex(content['protocol_parameters']) return res def forge_protocol_data(protocol_data: Dict[str, Any]) ->", "b'' if protocol_data.get('content'): res += forge_content(protocol_data['content']) else: res += forge_priority(protocol_data['priority'])", "forge_int_fixed(shell_header['level'], 4) res += forge_int_fixed(shell_header['proto'], 1) res += forge_base58(shell_header['predecessor']) res", "List, Tuple from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp def bump_fitness(fitness:", "def forge_content(content: Dict[str, Any]) -> 
bytes: res = b'' res", "+= forge_command(content['command']) res += forge_base58(content['hash']) res += forge_fitness(content['fitness']) res +=", "res += forge_base58(content['hash']) res += forge_fitness(content['fitness']) res += bytes.fromhex(content['protocol_parameters']) return", "+= b'\\x00' res += b'\\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\\x00' return", "forge_content(content: Dict[str, Any]) -> bytes: res = b'' res +=", "res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8) res += forge_int_fixed(shell_header['validation_pass'], 1) res +=", "+= forge_priority(protocol_data['priority']) res += bytes.fromhex(protocol_data['proof_of_work_nonce']) if protocol_data.get('seed_nonce_hash'): res += b'\\xFF'", "+= bytes.fromhex(protocol_data['proof_of_work_nonce']) if protocol_data.get('seed_nonce_hash'): res += b'\\xFF' res += forge_base58(protocol_data['seed_nonce_hash'])", "if protocol_data['liquidity_baking_escape_vote'] else b'\\x00' return res def forge_block_header(shell_header: Dict[str, Any])", "NotImplementedError(command) def forge_fitness(fitness: List[str]) -> bytes: return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)),", "res += forge_int_fixed(shell_header['validation_pass'], 1) res += forge_base58(shell_header['operations_hash']) res += forge_fitness(shell_header['fitness'])", "typing import Any, Dict, List, Tuple from pytezos.michelson.forge import forge_array,", "= b'' res += forge_command(content['command']) res += forge_base58(content['hash']) res +=", "+= bytes.fromhex(content['protocol_parameters']) return res def forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes:", "= 1 else: major = int.from_bytes(bytes.fromhex(fitness[0]), 'big') minor = int.from_bytes(bytes.fromhex(fitness[1]),", "res = b'' if protocol_data.get('content'): res += forge_content(protocol_data['content']) else: res", "value.to_bytes(length, 'big') def forge_command(command: str) -> bytes: if command 
==", "forge_base58(content['hash']) res += forge_fitness(content['fitness']) res += bytes.fromhex(content['protocol_parameters']) return res def", "+= forge_int_fixed(shell_header['validation_pass'], 1) res += forge_base58(shell_header['operations_hash']) res += forge_fitness(shell_header['fitness']) res", "bytes: return priority.to_bytes(2, 'big') def forge_content(content: Dict[str, Any]) -> bytes:", "if protocol_data.get('content'): res += forge_content(protocol_data['content']) else: res += forge_priority(protocol_data['priority']) res", "protocol_data.get('content'): res += forge_content(protocol_data['content']) else: res += forge_priority(protocol_data['priority']) res +=", "+= b'\\xFF' res += forge_base58(protocol_data['seed_nonce_hash']) else: res += b'\\x00' res", "1 return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex() def forge_int_fixed(value: int, length:", "Any]) -> bytes: res = b'' res += forge_command(content['command']) res", "int) -> bytes: return value.to_bytes(length, 'big') def forge_command(command: str) ->", "command == 'activate': return b'\\x00' raise NotImplementedError(command) def forge_fitness(fitness: List[str])", "Any]) -> bytes: res = b'' if protocol_data.get('content'): res +=", "+= forge_content(protocol_data['content']) else: res += forge_priority(protocol_data['priority']) res += bytes.fromhex(protocol_data['proof_of_work_nonce']) if", "import Any, Dict, List, Tuple from pytezos.michelson.forge import forge_array, forge_base58,", "raise NotImplementedError(command) def forge_fitness(fitness: List[str]) -> bytes: return forge_array(b''.join(map(lambda x:", "forge_base58, optimize_timestamp def bump_fitness(fitness: Tuple[str, str]) -> Tuple[str, str]: if", "forge_array(bytes.fromhex(x)), fitness))) def forge_priority(priority: int) -> bytes: return priority.to_bytes(2, 'big')", "import forge_array, forge_base58, optimize_timestamp def bump_fitness(fitness: Tuple[str, str]) -> Tuple[str,", "return res def 
forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes: res =", "int) -> bytes: return priority.to_bytes(2, 'big') def forge_content(content: Dict[str, Any])", "res += forge_priority(protocol_data['priority']) res += bytes.fromhex(protocol_data['proof_of_work_nonce']) if protocol_data.get('seed_nonce_hash'): res +=", "def forge_command(command: str) -> bytes: if command == 'activate': return", "+= forge_base58(protocol_data['seed_nonce_hash']) else: res += b'\\x00' res += b'\\xFF' if", "protocol_data['liquidity_baking_escape_vote'] else b'\\x00' return res def forge_block_header(shell_header: Dict[str, Any]) ->", "str) -> bytes: if command == 'activate': return b'\\x00' raise", "def forge_int_fixed(value: int, length: int) -> bytes: return value.to_bytes(length, 'big')", "res += forge_command(content['command']) res += forge_base58(content['hash']) res += forge_fitness(content['fitness']) res", "+= forge_int_fixed(shell_header['proto'], 1) res += forge_base58(shell_header['predecessor']) res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8)", "res += forge_base58(protocol_data['seed_nonce_hash']) else: res += b'\\x00' res += b'\\xFF'", "res += forge_int_fixed(shell_header['proto'], 1) res += forge_base58(shell_header['predecessor']) res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']),", "protocol_data.get('seed_nonce_hash'): res += b'\\xFF' res += forge_base58(protocol_data['seed_nonce_hash']) else: res +=", "res += bytes.fromhex(protocol_data['proof_of_work_nonce']) if protocol_data.get('seed_nonce_hash'): res += b'\\xFF' res +=", "forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes: res = b'' if protocol_data.get('content'):", "-> bytes: res = b'' if protocol_data.get('content'): res += forge_content(protocol_data['content'])", "res = b'' res += forge_command(content['command']) res += forge_base58(content['hash']) res", "if len(fitness) == 0: major = 0 minor = 1", "b'\\x00' return res def 
forge_block_header(shell_header: Dict[str, Any]) -> bytes: res", "-> bytes: res = forge_int_fixed(shell_header['level'], 4) res += forge_int_fixed(shell_header['proto'], 1)", "b'' res += forge_command(content['command']) res += forge_base58(content['hash']) res += forge_fitness(content['fitness'])", "'big') def forge_command(command: str) -> bytes: if command == 'activate':", "8) res += forge_int_fixed(shell_header['validation_pass'], 1) res += forge_base58(shell_header['operations_hash']) res +=", "return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex() def forge_int_fixed(value: int, length: int)", "else: res += b'\\x00' res += b'\\xFF' if protocol_data['liquidity_baking_escape_vote'] else", "+= forge_fitness(shell_header['fitness']) res += forge_base58(shell_header['context']) res += bytes.fromhex(shell_header['protocol_data']) return res", "fitness))) def forge_priority(priority: int) -> bytes: return priority.to_bytes(2, 'big') def", "forge_base58(shell_header['predecessor']) res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8) res += forge_int_fixed(shell_header['validation_pass'], 1) res", "bytes: return value.to_bytes(length, 'big') def forge_command(command: str) -> bytes: if", "res def forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes: res = b''", "forge_fitness(content['fitness']) res += bytes.fromhex(content['protocol_parameters']) return res def forge_protocol_data(protocol_data: Dict[str, Any])", "+= forge_base58(shell_header['predecessor']) res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8) res += forge_int_fixed(shell_header['validation_pass'], 1)", "minor = 1 else: major = int.from_bytes(bytes.fromhex(fitness[0]), 'big') minor =", "minor.to_bytes(8, 'big').hex() def forge_int_fixed(value: int, length: int) -> bytes: return", "Tuple[str, str]: if len(fitness) == 0: major = 0 minor", "-> bytes: if command == 'activate': return b'\\x00' raise NotImplementedError(command)", "= 
int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1 return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex()", "forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8) res += forge_int_fixed(shell_header['validation_pass'], 1) res += forge_base58(shell_header['operations_hash']) res", "forge_array, forge_base58, optimize_timestamp def bump_fitness(fitness: Tuple[str, str]) -> Tuple[str, str]:", "forge_int_fixed(shell_header['validation_pass'], 1) res += forge_base58(shell_header['operations_hash']) res += forge_fitness(shell_header['fitness']) res +=", "res def forge_block_header(shell_header: Dict[str, Any]) -> bytes: res = forge_int_fixed(shell_header['level'],", "str]: if len(fitness) == 0: major = 0 minor =", "= 0 minor = 1 else: major = int.from_bytes(bytes.fromhex(fitness[0]), 'big')", "1) res += forge_base58(shell_header['operations_hash']) res += forge_fitness(shell_header['fitness']) res += forge_base58(shell_header['context'])", "'big').hex() def forge_int_fixed(value: int, length: int) -> bytes: return value.to_bytes(length,", "res += forge_fitness(shell_header['fitness']) res += forge_base58(shell_header['context']) res += bytes.fromhex(shell_header['protocol_data']) return", "+= forge_fitness(content['fitness']) res += bytes.fromhex(content['protocol_parameters']) return res def forge_protocol_data(protocol_data: Dict[str,", "b'\\x00' raise NotImplementedError(command) def forge_fitness(fitness: List[str]) -> bytes: return forge_array(b''.join(map(lambda", "bytes.fromhex(protocol_data['proof_of_work_nonce']) if protocol_data.get('seed_nonce_hash'): res += b'\\xFF' res += forge_base58(protocol_data['seed_nonce_hash']) else:", "= forge_int_fixed(shell_header['level'], 4) res += forge_int_fixed(shell_header['proto'], 1) res += forge_base58(shell_header['predecessor'])", "forge_base58(shell_header['operations_hash']) res += forge_fitness(shell_header['fitness']) res += forge_base58(shell_header['context']) res += 
bytes.fromhex(shell_header['protocol_data'])", "bytes: return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness))) def forge_priority(priority: int) ->", "Tuple from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp def bump_fitness(fitness: Tuple[str,", "bytes: res = b'' res += forge_command(content['command']) res += forge_base58(content['hash'])", "else b'\\x00' return res def forge_block_header(shell_header: Dict[str, Any]) -> bytes:", "forge_command(content['command']) res += forge_base58(content['hash']) res += forge_fitness(content['fitness']) res += bytes.fromhex(content['protocol_parameters'])", "pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp def bump_fitness(fitness: Tuple[str, str]) ->", "'big') minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1 return major.to_bytes(1, 'big').hex(),", "+= forge_base58(content['hash']) res += forge_fitness(content['fitness']) res += bytes.fromhex(content['protocol_parameters']) return res", "else: res += forge_priority(protocol_data['priority']) res += bytes.fromhex(protocol_data['proof_of_work_nonce']) if protocol_data.get('seed_nonce_hash'): res", "bytes.fromhex(content['protocol_parameters']) return res def forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes: res", "res += b'\\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\\x00' return res def", "-> bytes: res = b'' res += forge_command(content['command']) res +=", "def forge_block_header(shell_header: Dict[str, Any]) -> bytes: res = forge_int_fixed(shell_header['level'], 4)", "else: major = int.from_bytes(bytes.fromhex(fitness[0]), 'big') minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') +", "res += forge_base58(shell_header['operations_hash']) res += forge_fitness(shell_header['fitness']) res += forge_base58(shell_header['context']) res", "optimize_timestamp def bump_fitness(fitness: Tuple[str, str]) -> Tuple[str, str]: if len(fitness)", "Dict, 
List, Tuple from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp def", "forge_base58(protocol_data['seed_nonce_hash']) else: res += b'\\x00' res += b'\\xFF' if protocol_data['liquidity_baking_escape_vote']", "priority.to_bytes(2, 'big') def forge_content(content: Dict[str, Any]) -> bytes: res =", "res += forge_fitness(content['fitness']) res += bytes.fromhex(content['protocol_parameters']) return res def forge_protocol_data(protocol_data:", "1 else: major = int.from_bytes(bytes.fromhex(fitness[0]), 'big') minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big')", "def bump_fitness(fitness: Tuple[str, str]) -> Tuple[str, str]: if len(fitness) ==", "+= forge_base58(shell_header['operations_hash']) res += forge_fitness(shell_header['fitness']) res += forge_base58(shell_header['context']) res +=", "if protocol_data.get('seed_nonce_hash'): res += b'\\xFF' res += forge_base58(protocol_data['seed_nonce_hash']) else: res", "forge_command(command: str) -> bytes: if command == 'activate': return b'\\x00'", "minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1 return major.to_bytes(1, 'big').hex(), minor.to_bytes(8,", "-> Tuple[str, str]: if len(fitness) == 0: major = 0", "Dict[str, Any]) -> bytes: res = b'' if protocol_data.get('content'): res", "def forge_fitness(fitness: List[str]) -> bytes: return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness)))", "x: forge_array(bytes.fromhex(x)), fitness))) def forge_priority(priority: int) -> bytes: return priority.to_bytes(2,", "res += forge_content(protocol_data['content']) else: res += forge_priority(protocol_data['priority']) res += bytes.fromhex(protocol_data['proof_of_work_nonce'])", "int, length: int) -> bytes: return value.to_bytes(length, 'big') def forge_command(command:", "forge_content(protocol_data['content']) else: res += forge_priority(protocol_data['priority']) res += bytes.fromhex(protocol_data['proof_of_work_nonce']) if 
protocol_data.get('seed_nonce_hash'):", "major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex() def forge_int_fixed(value: int, length: int) ->", "bytes: res = forge_int_fixed(shell_header['level'], 4) res += forge_int_fixed(shell_header['proto'], 1) res", "Any]) -> bytes: res = forge_int_fixed(shell_header['level'], 4) res += forge_int_fixed(shell_header['proto'],", "if command == 'activate': return b'\\x00' raise NotImplementedError(command) def forge_fitness(fitness:", "Any, Dict, List, Tuple from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp", "return priority.to_bytes(2, 'big') def forge_content(content: Dict[str, Any]) -> bytes: res", "return res def forge_block_header(shell_header: Dict[str, Any]) -> bytes: res =", "b'\\x00' res += b'\\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\\x00' return res", "'big') def forge_content(content: Dict[str, Any]) -> bytes: res = b''", "-> bytes: return value.to_bytes(length, 'big') def forge_command(command: str) -> bytes:" ]
[ "np import math import sys import paddle.compat as cpt from", "2.0 (the \"License\"); # you may not use this file", "'Argmax': self.argmaxes} def init_test_case(self): self.batch_size = 3 self.channels = 3", "* self.spatial_scale)) roi_end_h = int(cpt.round(roi[4] * self.spatial_scale)) roi_height = int(max(roi_end_h", "roi_start_h, 0), self.height) wstart = min(max(wstart + roi_start_w, 0), self.width)", "pw] = -sys.float_info.max argmax_data[i, c, ph, pw] = -1 for", "set_data(self): self.init_test_case() self.make_rois() self.calc_roi_pool() self.inputs = {'X': self.x, 'ROIs': (self.rois[:,", "4 # n, c, h, w self.x_dim = (self.batch_size, self.channels,", "argmax_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) for i in range(self.rois_num):", "// self.spatial_scale - self.pooled_height) x2 = np.random.random_integers(x1 + self.pooled_width, self.width", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "/ 4.0 self.pooled_height = 2 self.pooled_width = 2 self.x =", "self.width) self.spatial_scale = 1.0 / 4.0 self.pooled_height = 2 self.pooled_width", "0), self.width) wend = min(max(wend + roi_start_w, 0), self.width) is_empty", "ph, pw] = -sys.float_info.max argmax_data[i, c, ph, pw] = -1", "y1 + self.pooled_height, self.height // self.spatial_scale) roi = [bno, x1,", "self.spatial_scale)) roi_start_h = int(cpt.round(roi[2] * self.spatial_scale)) roi_end_w = int(cpt.round(roi[3] *", "self.x = np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self): out_data = np.zeros((self.rois_num, self.channels, self.pooled_height,", "min(max(hend + roi_start_h, 0), self.height) wstart = min(max(wstart + roi_start_w,", "cpt from op_test import OpTest class TestROIPoolOp(OpTest): def set_data(self): self.init_test_case()", "self.spatial_scale - self.pooled_height) x2 = np.random.random_integers(x1 + self.pooled_width, self.width //", "self.height // self.spatial_scale) roi = [bno, x1, y1, x2, 
y2]", "y1, x2, y2] rois.append(roi) self.rois_num = len(rois) self.rois = np.array(rois).astype(\"int64\")", "float(self.pooled_height) bin_size_w = float(roi_width) / float(self.pooled_width) for c in range(self.channels):", "in range(bno + 1): x1 = np.random.random_integers( 0, self.width //", "range(self.rois_num): roi = self.rois[i] roi_batch_id = roi[0] roi_start_w = int(cpt.round(roi[1]", "= argmax_data.astype('int64') def make_rois(self): rois = [] self.rois_lod = [[]]", "language governing permissions and # limitations under the License. from", "= int(cpt.round(roi[2] * self.spatial_scale)) roi_end_w = int(cpt.round(roi[3] * self.spatial_scale)) roi_end_h", "use this file except in compliance with the License. #", "// self.spatial_scale - self.pooled_width) y1 = np.random.random_integers( 0, self.height //", "OpTest class TestROIPoolOp(OpTest): def set_data(self): self.init_test_case() self.make_rois() self.calc_roi_pool() self.inputs =", "def init_test_case(self): self.batch_size = 3 self.channels = 3 self.height =", "self.pooled_width)) for i in range(self.rois_num): roi = self.rois[i] roi_batch_id =", "self.pooled_height, self.pooled_width)) for i in range(self.rois_num): roi = self.rois[i] roi_batch_id", "np.random.random_integers(x1 + self.pooled_width, self.width // self.spatial_scale) y2 = np.random.random_integers( y1", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "# limitations under the License. from __future__ import print_function import", "License. 
# You may obtain a copy of the License", "self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes} def init_test_case(self): self.batch_size =", "out_data[i, c, ph, pw] = -sys.float_info.max argmax_data[i, c, ph, pw]", "under the License is distributed on an \"AS IS\" BASIS,", "= 6 self.width = 4 # n, c, h, w", "License for the specific language governing permissions and # limitations", "or (wend <= wstart) if is_empty: out_data[i, c, ph, pw]", "hstart) or (wend <= wstart) if is_empty: out_data[i, c, ph,", "Reserved. # # Licensed under the Apache License, Version 2.0", "self.height) wstart = min(max(wstart + roi_start_w, 0), self.width) wend =", "argmax_data[i, c, ph, pw] = h * self.width + w", "in range(self.channels): for ph in range(self.pooled_height): for pw in range(self.pooled_width):", "w] argmax_data[i, c, ph, pw] = h * self.width +", "self.width) wend = min(max(wend + roi_start_w, 0), self.width) is_empty =", "in range(wstart, wend): if x_i[c, h, w] > out_data[i, c,", "= np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self): out_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width))", "= len(rois) self.rois = np.array(rois).astype(\"int64\") def setUp(self): self.op_type = \"roi_pool\"", "+ roi_start_w, 0), self.width) is_empty = (hend <= hstart) or", "[[]] for bno in range(self.batch_size): self.rois_lod[0].append(bno + 1) for i", "in compliance with the License. 
# You may obtain a", "software # distributed under the License is distributed on an", "np.random.random_integers( 0, self.width // self.spatial_scale - self.pooled_width) y1 = np.random.random_integers(", "= 2 self.x = np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self): out_data = np.zeros((self.rois_num,", "bin_size_h)) wstart = int(math.floor(pw * bin_size_w)) hend = int(math.ceil((ph +", "* self.spatial_scale)) roi_height = int(max(roi_end_h - roi_start_h + 1, 1))", "self.pooled_width = 2 self.x = np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self): out_data =", "c, ph, pw] = -1 for h in range(hstart, hend):", "= int(math.floor(ph * bin_size_h)) wstart = int(math.floor(pw * bin_size_w)) hend", "out_data[i, c, ph, pw] = x_i[c, h, w] argmax_data[i, c,", "\"roi_pool\" self.set_data() def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') if", "h, w] > out_data[i, c, ph, pw]: out_data[i, c, ph,", "self.height // self.spatial_scale - self.pooled_height) x2 = np.random.random_integers(x1 + self.pooled_width,", "class TestROIPoolOp(OpTest): def set_data(self): self.init_test_case() self.make_rois() self.calc_roi_pool() self.inputs = {'X':", "self.inputs = {'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)} self.attrs =", "and # limitations under the License. 
from __future__ import print_function", "0), self.height) hend = min(max(hend + roi_start_h, 0), self.height) wstart", "range(bno + 1): x1 = np.random.random_integers( 0, self.width // self.spatial_scale", "unittest import numpy as np import math import sys import", "self.rois[i] roi_batch_id = roi[0] roi_start_w = int(cpt.round(roi[1] * self.spatial_scale)) roi_start_h", "roi_batch_id = roi[0] roi_start_w = int(cpt.round(roi[1] * self.spatial_scale)) roi_start_h =", "x_i = self.x[roi_batch_id] bin_size_h = float(roi_height) / float(self.pooled_height) bin_size_w =", "self.pooled_height, 'pooled_width': self.pooled_width } self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes}", "pw] = -1 for h in range(hstart, hend): for w", "w in range(wstart, wend): if x_i[c, h, w] > out_data[i,", "argmax_data.astype('int64') def make_rois(self): rois = [] self.rois_lod = [[]] for", "x2, y2] rois.append(roi) self.rois_num = len(rois) self.rois = np.array(rois).astype(\"int64\") def", "roi = [bno, x1, y1, x2, y2] rois.append(roi) self.rois_num =", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "x1, y1, x2, y2] rois.append(roi) self.rois_num = len(rois) self.rois =", "= np.random.random_integers( 0, self.height // self.spatial_scale - self.pooled_height) x2 =", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "int(cpt.round(roi[1] * self.spatial_scale)) roi_start_h = int(cpt.round(roi[2] * self.spatial_scale)) roi_end_w =", "self.outs, 'Argmax': self.argmaxes} def init_test_case(self): self.batch_size = 3 self.channels =", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "[] self.rois_lod = [[]] for bno in range(self.batch_size): self.rois_lod[0].append(bno +", "to in writing, software # distributed under the License is", "import math import sys import paddle.compat as cpt from op_test", "out_data.astype('float32') self.argmaxes = argmax_data.astype('int64') def make_rois(self): rois = [] self.rois_lod", "op_test import OpTest class TestROIPoolOp(OpTest): def set_data(self): self.init_test_case() self.make_rois() self.calc_roi_pool()", "# See the License for the specific language governing permissions", "} self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes} def init_test_case(self): self.batch_size", "or agreed to in writing, software # distributed under the", "def set_data(self): self.init_test_case() self.make_rois() self.calc_roi_pool() self.inputs = {'X': self.x, 'ROIs':", "required by applicable law or agreed to in writing, software", "range(self.channels): for ph in range(self.pooled_height): for pw in range(self.pooled_width): hstart", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "with the License. 
# You may obtain a copy of", "1) * bin_size_w)) hstart = min(max(hstart + roi_start_h, 0), self.height)", "1, 1)) roi_width = int(max(roi_end_w - roi_start_w + 1, 1))", "c, ph, pw]: out_data[i, c, ph, pw] = x_i[c, h,", "= [bno, x1, y1, x2, y2] rois.append(roi) self.rois_num = len(rois)", "1)) roi_width = int(max(roi_end_w - roi_start_w + 1, 1)) x_i", "1)) x_i = self.x[roi_batch_id] bin_size_h = float(roi_height) / float(self.pooled_height) bin_size_w", "range(wstart, wend): if x_i[c, h, w] > out_data[i, c, ph,", "math import sys import paddle.compat as cpt from op_test import", "TestROIPoolOp(OpTest): def set_data(self): self.init_test_case() self.make_rois() self.calc_roi_pool() self.inputs = {'X': self.x,", "self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') if __name__ == '__main__': unittest.main()", "# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. #", "range(hstart, hend): for w in range(wstart, wend): if x_i[c, h,", "compliance with the License. # You may obtain a copy", "All Rights Reserved. 
# # Licensed under the Apache License,", "agreed to in writing, software # distributed under the License", "self.rois_lod = [[]] for bno in range(self.batch_size): self.rois_lod[0].append(bno + 1)", "self.batch_size = 3 self.channels = 3 self.height = 6 self.width", "distributed under the License is distributed on an \"AS IS\"", "1): x1 = np.random.random_integers( 0, self.width // self.spatial_scale - self.pooled_width)", "range(self.pooled_height): for pw in range(self.pooled_width): hstart = int(math.floor(ph * bin_size_h))", "= \"roi_pool\" self.set_data() def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out')", "for i in range(self.rois_num): roi = self.rois[i] roi_batch_id = roi[0]", "self.channels, self.pooled_height, self.pooled_width)) argmax_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) for", "ph, pw]: out_data[i, c, ph, pw] = x_i[c, h, w]", "'ROIs': (self.rois[:, 1:5], self.rois_lod)} self.attrs = { 'spatial_scale': self.spatial_scale, 'pooled_height':", "= -1 for h in range(hstart, hend): for w in", "= int(max(roi_end_w - roi_start_w + 1, 1)) x_i = self.x[roi_batch_id]", "express or implied. # See the License for the specific", "import unittest import numpy as np import math import sys", "wstart = min(max(wstart + roi_start_w, 0), self.width) wend = min(max(wend", "out_data[i, c, ph, pw]: out_data[i, c, ph, pw] = x_i[c,", "except in compliance with the License. 
# You may obtain", "for pw in range(self.pooled_width): hstart = int(math.floor(ph * bin_size_h)) wstart", "1) * bin_size_h)) wend = int(math.ceil((pw + 1) * bin_size_w))", "roi_start_h, 0), self.height) hend = min(max(hend + roi_start_h, 0), self.height)", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "self.height, self.width) self.spatial_scale = 1.0 / 4.0 self.pooled_height = 2", "is_empty = (hend <= hstart) or (wend <= wstart) if", "not use this file except in compliance with the License.", "roi_start_w, 0), self.width) is_empty = (hend <= hstart) or (wend", "2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under", "def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') if __name__ ==", "writing, software # distributed under the License is distributed on", "1:5], self.rois_lod)} self.attrs = { 'spatial_scale': self.spatial_scale, 'pooled_height': self.pooled_height, 'pooled_width':", "* bin_size_w)) hstart = min(max(hstart + roi_start_h, 0), self.height) hend", "self.pooled_height) x2 = np.random.random_integers(x1 + self.pooled_width, self.width // self.spatial_scale) y2", "import OpTest class TestROIPoolOp(OpTest): def set_data(self): self.init_test_case() self.make_rois() self.calc_roi_pool() self.inputs", "you may not use this file except in compliance with", "self.pooled_height = 2 self.pooled_width = 2 self.x = np.random.random(self.x_dim).astype('float32') def", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "self.x_dim = (self.batch_size, self.channels, self.height, self.width) self.spatial_scale = 1.0 /", "bin_size_h)) wend = int(math.ceil((pw + 1) * bin_size_w)) hstart =", "ph, pw] = 0 else: out_data[i, c, ph, pw] =", "= np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) for i in range(self.rois_num): roi", "= int(math.floor(pw * bin_size_w)) hend = int(math.ceil((ph + 1) *", "'pooled_width': self.pooled_width } 
self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes} def", "CONDITIONS OF ANY KIND, either express or implied. # See", "range(self.batch_size): self.rois_lod[0].append(bno + 1) for i in range(bno + 1):", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "= h * self.width + w self.outs = out_data.astype('float32') self.argmaxes", "self.pooled_width)) argmax_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) for i in", "self.init_test_case() self.make_rois() self.calc_roi_pool() self.inputs = {'X': self.x, 'ROIs': (self.rois[:, 1:5],", "self.rois = np.array(rois).astype(\"int64\") def setUp(self): self.op_type = \"roi_pool\" self.set_data() def", "0), self.height) wstart = min(max(wstart + roi_start_w, 0), self.width) wend", "self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)} self.attrs = { 'spatial_scale': self.spatial_scale,", "1) for i in range(bno + 1): x1 = np.random.random_integers(", "hstart = int(math.floor(ph * bin_size_h)) wstart = int(math.floor(pw * bin_size_w))", "self.height) hend = min(max(hend + roi_start_h, 0), self.height) wstart =", "in range(self.pooled_height): for pw in range(self.pooled_width): hstart = int(math.floor(ph *", "bin_size_w)) hstart = min(max(hstart + roi_start_h, 0), self.height) hend =", "- self.pooled_height) x2 = np.random.random_integers(x1 + self.pooled_width, self.width // self.spatial_scale)", "'spatial_scale': self.spatial_scale, 'pooled_height': self.pooled_height, 'pooled_width': self.pooled_width } self.outputs = {'Out':", "1, 1)) x_i = self.x[roi_batch_id] bin_size_h = float(roi_height) / float(self.pooled_height)", "c, h, w self.x_dim = (self.batch_size, self.channels, self.height, self.width) self.spatial_scale", "= 1.0 / 4.0 self.pooled_height = 2 self.pooled_width = 2", "= float(roi_height) / float(self.pooled_height) bin_size_w = float(roi_width) / float(self.pooled_width) for", "wstart = int(math.floor(pw * bin_size_w)) hend = int(math.ceil((ph + 1)", 
"float(roi_height) / float(self.pooled_height) bin_size_w = float(roi_width) / float(self.pooled_width) for c", "self.width + w self.outs = out_data.astype('float32') self.argmaxes = argmax_data.astype('int64') def", "OR CONDITIONS OF ANY KIND, either express or implied. #", "ph, pw] = h * self.width + w self.outs =", "the License is distributed on an \"AS IS\" BASIS, #", "c, ph, pw] = -sys.float_info.max argmax_data[i, c, ph, pw] =", "/ float(self.pooled_width) for c in range(self.channels): for ph in range(self.pooled_height):", "import sys import paddle.compat as cpt from op_test import OpTest", "(hend <= hstart) or (wend <= wstart) if is_empty: out_data[i,", "+ roi_start_h, 0), self.height) hend = min(max(hend + roi_start_h, 0),", "y2 = np.random.random_integers( y1 + self.pooled_height, self.height // self.spatial_scale) roi", "= np.random.random_integers( 0, self.width // self.spatial_scale - self.pooled_width) y1 =", "np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) argmax_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width))", "+ 1, 1)) roi_width = int(max(roi_end_w - roi_start_w + 1,", "* bin_size_h)) wend = int(math.ceil((pw + 1) * bin_size_w)) hstart", "bin_size_w)) hend = int(math.ceil((ph + 1) * bin_size_h)) wend =", "roi = self.rois[i] roi_batch_id = roi[0] roi_start_w = int(cpt.round(roi[1] *", "self.spatial_scale = 1.0 / 4.0 self.pooled_height = 2 self.pooled_width =", "self.width) is_empty = (hend <= hstart) or (wend <= wstart)", "= roi[0] roi_start_w = int(cpt.round(roi[1] * self.spatial_scale)) roi_start_h = int(cpt.round(roi[2]", "= min(max(hstart + roi_start_h, 0), self.height) hend = min(max(hend +", "as cpt from op_test import OpTest class TestROIPoolOp(OpTest): def set_data(self):", "np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) for i in range(self.rois_num): roi =", "law or agreed to in writing, software # distributed under", "= (hend <= 
hstart) or (wend <= wstart) if is_empty:", "np.random.random_integers( 0, self.height // self.spatial_scale - self.pooled_height) x2 = np.random.random_integers(x1", "self.rois_lod)} self.attrs = { 'spatial_scale': self.spatial_scale, 'pooled_height': self.pooled_height, 'pooled_width': self.pooled_width", "= int(cpt.round(roi[1] * self.spatial_scale)) roi_start_h = int(cpt.round(roi[2] * self.spatial_scale)) roi_end_w", "self.pooled_width } self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes} def init_test_case(self):", "roi_start_h = int(cpt.round(roi[2] * self.spatial_scale)) roi_end_w = int(cpt.round(roi[3] * self.spatial_scale))", "for c in range(self.channels): for ph in range(self.pooled_height): for pw", "roi_height = int(max(roi_end_h - roi_start_h + 1, 1)) roi_width =", "= self.rois[i] roi_batch_id = roi[0] roi_start_w = int(cpt.round(roi[1] * self.spatial_scale))", "self.rois_num = len(rois) self.rois = np.array(rois).astype(\"int64\") def setUp(self): self.op_type =", "w self.outs = out_data.astype('float32') self.argmaxes = argmax_data.astype('int64') def make_rois(self): rois", "float(self.pooled_width) for c in range(self.channels): for ph in range(self.pooled_height): for", "<= wstart) if is_empty: out_data[i, c, ph, pw] = 0", "calc_roi_pool(self): out_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) argmax_data = np.zeros((self.rois_num,", "may obtain a copy of the License at # #", "- roi_start_h + 1, 1)) roi_width = int(max(roi_end_w - roi_start_w", "self.make_rois() self.calc_roi_pool() self.inputs = {'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)}", "for w in range(wstart, wend): if x_i[c, h, w] >", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "self.channels, self.pooled_height, self.pooled_width)) for i in range(self.rois_num): roi = self.rois[i]", "may not use this file except in compliance with the", "int(math.floor(ph * bin_size_h)) wstart = int(math.floor(pw * 
bin_size_w)) hend =", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "- roi_start_w + 1, 1)) x_i = self.x[roi_batch_id] bin_size_h =", "this file except in compliance with the License. # You", "x1 = np.random.random_integers( 0, self.width // self.spatial_scale - self.pooled_width) y1", "= int(cpt.round(roi[3] * self.spatial_scale)) roi_end_h = int(cpt.round(roi[4] * self.spatial_scale)) roi_height", "= { 'spatial_scale': self.spatial_scale, 'pooled_height': self.pooled_height, 'pooled_width': self.pooled_width } self.outputs", "self.spatial_scale, 'pooled_height': self.pooled_height, 'pooled_width': self.pooled_width } self.outputs = {'Out': self.outs,", "__future__ import print_function import unittest import numpy as np import", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "{'Out': self.outs, 'Argmax': self.argmaxes} def init_test_case(self): self.batch_size = 3 self.channels", "self.spatial_scale) roi = [bno, x1, y1, x2, y2] rois.append(roi) self.rois_num", "min(max(wstart + roi_start_w, 0), self.width) wend = min(max(wend + roi_start_w,", "# # Licensed under the Apache License, Version 2.0 (the", "* self.spatial_scale)) roi_start_h = int(cpt.round(roi[2] * self.spatial_scale)) roi_end_w = int(cpt.round(roi[3]", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "numpy as np import math import sys import paddle.compat as", "self.argmaxes = argmax_data.astype('int64') def make_rois(self): rois = [] self.rois_lod =", "self.set_data() def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') if __name__", "* self.width + w self.outs = out_data.astype('float32') self.argmaxes = argmax_data.astype('int64')", "c, ph, pw] = 0 else: out_data[i, c, ph, pw]", "self.pooled_width) y1 = np.random.random_integers( 0, self.height // self.spatial_scale - self.pooled_height)", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "/ float(self.pooled_height) bin_size_w = float(roi_width) / float(self.pooled_width) for c in", "self.pooled_height, self.height // self.spatial_scale) roi = [bno, x1, y1, x2,", "roi_end_w = int(cpt.round(roi[3] * self.spatial_scale)) roi_end_h = int(cpt.round(roi[4] * self.spatial_scale))", "w self.x_dim = (self.batch_size, self.channels, self.height, self.width) self.spatial_scale = 1.0", "roi[0] roi_start_w = int(cpt.round(roi[1] * self.spatial_scale)) roi_start_h = int(cpt.round(roi[2] *", "self.spatial_scale - self.pooled_width) y1 = np.random.random_integers( 0, self.height // self.spatial_scale", "= {'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)} self.attrs = {", "x_i[c, h, w] argmax_data[i, c, ph, pw] = h *", "pw in range(self.pooled_width): hstart = int(math.floor(ph * bin_size_h)) wstart =", "range(self.pooled_width): hstart = int(math.floor(ph * bin_size_h)) wstart = int(math.floor(pw *", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "as np import math import sys import paddle.compat as cpt", "6 self.width = 4 # n, c, h, w self.x_dim", "(wend <= wstart) if is_empty: out_data[i, c, ph, pw] =", "hstart = min(max(hstart + roi_start_h, 0), self.height) hend = min(max(hend", "or implied. 
# See the License for the specific language", "governing permissions and # limitations under the License. from __future__", "Rights Reserved. # # Licensed under the Apache License, Version", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "y1 = np.random.random_integers( 0, self.height // self.spatial_scale - self.pooled_height) x2", "3 self.channels = 3 self.height = 6 self.width = 4", "= x_i[c, h, w] argmax_data[i, c, ph, pw] = h", "rois.append(roi) self.rois_num = len(rois) self.rois = np.array(rois).astype(\"int64\") def setUp(self): self.op_type", "int(cpt.round(roi[3] * self.spatial_scale)) roi_end_h = int(cpt.round(roi[4] * self.spatial_scale)) roi_height =", "self.rois_lod[0].append(bno + 1) for i in range(bno + 1): x1", "self.height = 6 self.width = 4 # n, c, h,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "+ 1) for i in range(bno + 1): x1 =", "w] > out_data[i, c, ph, pw]: out_data[i, c, ph, pw]", "self.x[roi_batch_id] bin_size_h = float(roi_height) / float(self.pooled_height) bin_size_w = float(roi_width) /", "for ph in range(self.pooled_height): for pw in range(self.pooled_width): hstart =", "2 self.x = np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self): out_data = np.zeros((self.rois_num, self.channels,", "in range(self.pooled_width): hstart = int(math.floor(ph * bin_size_h)) wstart = int(math.floor(pw", "pw] = 0 else: out_data[i, c, ph, pw] = -sys.float_info.max", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "+ self.pooled_height, self.height // self.spatial_scale) roi = [bno, x1, y1,", "min(max(hstart + roi_start_h, 0), self.height) hend = min(max(hend + roi_start_h,", "= np.random.random_integers(x1 + self.pooled_width, self.width // self.spatial_scale) y2 = np.random.random_integers(", "bno in range(self.batch_size): 
self.rois_lod[0].append(bno + 1) for i in range(bno", "out_data[i, c, ph, pw] = 0 else: out_data[i, c, ph,", "= 0 else: out_data[i, c, ph, pw] = -sys.float_info.max argmax_data[i,", "min(max(wend + roi_start_w, 0), self.width) is_empty = (hend <= hstart)", "permissions and # limitations under the License. from __future__ import", "print_function import unittest import numpy as np import math import", "Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # #", "= 4 # n, c, h, w self.x_dim = (self.batch_size,", "# # Unless required by applicable law or agreed to", "limitations under the License. from __future__ import print_function import unittest", "ph in range(self.pooled_height): for pw in range(self.pooled_width): hstart = int(math.floor(ph", "0), self.width) is_empty = (hend <= hstart) or (wend <=", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "self.spatial_scale)) roi_end_h = int(cpt.round(roi[4] * self.spatial_scale)) roi_height = int(max(roi_end_h -", "Version 2.0 (the \"License\"); # you may not use this", "// self.spatial_scale) roi = [bno, x1, y1, x2, y2] rois.append(roi)", "int(cpt.round(roi[4] * self.spatial_scale)) roi_height = int(max(roi_end_h - roi_start_h + 1,", "if is_empty: out_data[i, c, ph, pw] = 0 else: out_data[i,", "for bno in range(self.batch_size): self.rois_lod[0].append(bno + 1) for i in", "self.outs = out_data.astype('float32') self.argmaxes = argmax_data.astype('int64') def make_rois(self): rois =", "h, w self.x_dim = (self.batch_size, self.channels, self.height, self.width) self.spatial_scale =", "4.0 self.pooled_height = 2 self.pooled_width = 2 self.x = np.random.random(self.x_dim).astype('float32')", "implied. 
# See the License for the specific language governing", "{'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)} self.attrs = { 'spatial_scale':", "under the Apache License, Version 2.0 (the \"License\"); # you", "= int(math.ceil((ph + 1) * bin_size_h)) wend = int(math.ceil((pw +", "setUp(self): self.op_type = \"roi_pool\" self.set_data() def test_check_output(self): self.check_output() def test_check_grad(self):", "roi_start_w + 1, 1)) x_i = self.x[roi_batch_id] bin_size_h = float(roi_height)", "test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') if __name__ == '__main__':", "+ 1): x1 = np.random.random_integers( 0, self.width // self.spatial_scale -", "> out_data[i, c, ph, pw]: out_data[i, c, ph, pw] =", "sys import paddle.compat as cpt from op_test import OpTest class", "hend = int(math.ceil((ph + 1) * bin_size_h)) wend = int(math.ceil((pw", "by applicable law or agreed to in writing, software #", "self.width // self.spatial_scale) y2 = np.random.random_integers( y1 + self.pooled_height, self.height", "= 2 self.pooled_width = 2 self.x = np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self):", "1.0 / 4.0 self.pooled_height = 2 self.pooled_width = 2 self.x", "3 self.height = 6 self.width = 4 # n, c,", "+ 1) * bin_size_w)) hstart = min(max(hstart + roi_start_h, 0),", "-sys.float_info.max argmax_data[i, c, ph, pw] = -1 for h in", "i in range(self.rois_num): roi = self.rois[i] roi_batch_id = roi[0] roi_start_w", "0 else: out_data[i, c, ph, pw] = -sys.float_info.max argmax_data[i, c,", "self.spatial_scale)) roi_height = int(max(roi_end_h - roi_start_h + 1, 1)) roi_width", "len(rois) self.rois = np.array(rois).astype(\"int64\") def setUp(self): self.op_type = \"roi_pool\" self.set_data()", "= min(max(wend + roi_start_w, 0), self.width) is_empty = (hend <=", "float(roi_width) / float(self.pooled_width) for c in range(self.channels): for ph in", "ph, pw] = x_i[c, h, w] argmax_data[i, c, ph, pw]", "ph, 
pw] = -1 for h in range(hstart, hend): for", "from op_test import OpTest class TestROIPoolOp(OpTest): def set_data(self): self.init_test_case() self.make_rois()", "in range(self.batch_size): self.rois_lod[0].append(bno + 1) for i in range(bno +", "+ self.pooled_width, self.width // self.spatial_scale) y2 = np.random.random_integers( y1 +", "'pooled_height': self.pooled_height, 'pooled_width': self.pooled_width } self.outputs = {'Out': self.outs, 'Argmax':", "self.spatial_scale)) roi_end_w = int(cpt.round(roi[3] * self.spatial_scale)) roi_end_h = int(cpt.round(roi[4] *", "= float(roi_width) / float(self.pooled_width) for c in range(self.channels): for ph", "# n, c, h, w self.x_dim = (self.batch_size, self.channels, self.height,", "x_i[c, h, w] > out_data[i, c, ph, pw]: out_data[i, c,", "= {'Out': self.outs, 'Argmax': self.argmaxes} def init_test_case(self): self.batch_size = 3", "self.width = 4 # n, c, h, w self.x_dim =", "pw]: out_data[i, c, ph, pw] = x_i[c, h, w] argmax_data[i,", "int(math.floor(pw * bin_size_w)) hend = int(math.ceil((ph + 1) * bin_size_h))", "roi_end_h = int(cpt.round(roi[4] * self.spatial_scale)) roi_height = int(max(roi_end_h - roi_start_h", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "bin_size_h = float(roi_height) / float(self.pooled_height) bin_size_w = float(roi_width) / float(self.pooled_width)", "Unless required by applicable law or agreed to in writing,", "for h in range(hstart, hend): for w in range(wstart, wend):", "pw] = x_i[c, h, w] argmax_data[i, c, ph, pw] =", "the specific language governing permissions and # limitations under the", "out_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) argmax_data = np.zeros((self.rois_num, self.channels,", "rois = [] self.rois_lod = [[]] for bno in range(self.batch_size):", "in range(hstart, hend): for w in range(wstart, wend): if x_i[c,", "applicable law or agreed to in writing, software # distributed", "= self.x[roi_batch_id] bin_size_h = 
float(roi_height) / float(self.pooled_height) bin_size_w = float(roi_width)", "self.channels = 3 self.height = 6 self.width = 4 #", "0, self.height // self.spatial_scale - self.pooled_height) x2 = np.random.random_integers(x1 +", "PaddlePaddle Authors. All Rights Reserved. # # Licensed under the", "self.attrs = { 'spatial_scale': self.spatial_scale, 'pooled_height': self.pooled_height, 'pooled_width': self.pooled_width }", "import paddle.compat as cpt from op_test import OpTest class TestROIPoolOp(OpTest):", "make_rois(self): rois = [] self.rois_lod = [[]] for bno in", "{ 'spatial_scale': self.spatial_scale, 'pooled_height': self.pooled_height, 'pooled_width': self.pooled_width } self.outputs =", "in writing, software # distributed under the License is distributed", "the License. from __future__ import print_function import unittest import numpy", "roi_start_h + 1, 1)) roi_width = int(max(roi_end_w - roi_start_w +", "= np.random.random_integers( y1 + self.pooled_height, self.height // self.spatial_scale) roi =", "import numpy as np import math import sys import paddle.compat", "self.argmaxes} def init_test_case(self): self.batch_size = 3 self.channels = 3 self.height", "self.pooled_height, self.pooled_width)) argmax_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) for i", "in range(self.rois_num): roi = self.rois[i] roi_batch_id = roi[0] roi_start_w =", "roi_width = int(max(roi_end_w - roi_start_w + 1, 1)) x_i =", "// self.spatial_scale) y2 = np.random.random_integers( y1 + self.pooled_height, self.height //", "2 self.pooled_width = 2 self.x = np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self): out_data", "+ roi_start_h, 0), self.height) wstart = min(max(wstart + roi_start_w, 0),", "= 3 self.height = 6 self.width = 4 # n,", "(self.batch_size, self.channels, self.height, self.width) self.spatial_scale = 1.0 / 4.0 self.pooled_height", "is_empty: out_data[i, c, ph, pw] = 0 else: out_data[i, c,", "* bin_size_w)) 
hend = int(math.ceil((ph + 1) * bin_size_h)) wend", "int(cpt.round(roi[2] * self.spatial_scale)) roi_end_w = int(cpt.round(roi[3] * self.spatial_scale)) roi_end_h =", "= int(cpt.round(roi[4] * self.spatial_scale)) roi_height = int(max(roi_end_h - roi_start_h +", "self.spatial_scale) y2 = np.random.random_integers( y1 + self.pooled_height, self.height // self.spatial_scale)", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "y2] rois.append(roi) self.rois_num = len(rois) self.rois = np.array(rois).astype(\"int64\") def setUp(self):", "+ roi_start_w, 0), self.width) wend = min(max(wend + roi_start_w, 0),", "# You may obtain a copy of the License at", "(self.rois[:, 1:5], self.rois_lod)} self.attrs = { 'spatial_scale': self.spatial_scale, 'pooled_height': self.pooled_height,", "bin_size_w = float(roi_width) / float(self.pooled_width) for c in range(self.channels): for", "np.array(rois).astype(\"int64\") def setUp(self): self.op_type = \"roi_pool\" self.set_data() def test_check_output(self): self.check_output()", "def make_rois(self): rois = [] self.rois_lod = [[]] for bno", "init_test_case(self): self.batch_size = 3 self.channels = 3 self.height = 6", "self.channels, self.height, self.width) self.spatial_scale = 1.0 / 4.0 self.pooled_height =", "import print_function import unittest import numpy as np import math", "roi_start_w, 0), self.width) wend = min(max(wend + roi_start_w, 0), self.width)", "+ w self.outs = out_data.astype('float32') self.argmaxes = argmax_data.astype('int64') def make_rois(self):", "(c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "int(math.ceil((pw + 1) * bin_size_w)) hstart = min(max(hstart + roi_start_h,", "int(max(roi_end_h - roi_start_h + 1, 1)) roi_width = int(max(roi_end_w -", "else: out_data[i, c, ph, pw] = -sys.float_info.max argmax_data[i, c, ph,", "Authors. 
All Rights Reserved. # # Licensed under the Apache", "<= hstart) or (wend <= wstart) if is_empty: out_data[i, c,", "= 3 self.channels = 3 self.height = 6 self.width =", "pw] = h * self.width + w self.outs = out_data.astype('float32')", "under the License. from __future__ import print_function import unittest import", "+ 1, 1)) x_i = self.x[roi_batch_id] bin_size_h = float(roi_height) /", "= min(max(wstart + roi_start_w, 0), self.width) wend = min(max(wend +", "c, ph, pw] = x_i[c, h, w] argmax_data[i, c, ph,", "wend = min(max(wend + roi_start_w, 0), self.width) is_empty = (hend", "0, self.width // self.spatial_scale - self.pooled_width) y1 = np.random.random_integers( 0,", "argmax_data[i, c, ph, pw] = -1 for h in range(hstart,", "the License for the specific language governing permissions and #", "wstart) if is_empty: out_data[i, c, ph, pw] = 0 else:", "Apache License, Version 2.0 (the \"License\"); # you may not", "wend): if x_i[c, h, w] > out_data[i, c, ph, pw]:", "* bin_size_h)) wstart = int(math.floor(pw * bin_size_w)) hend = int(math.ceil((ph", "hend = min(max(hend + roi_start_h, 0), self.height) wstart = min(max(wstart", "h in range(hstart, hend): for w in range(wstart, wend): if", "hend): for w in range(wstart, wend): if x_i[c, h, w]", "either express or implied. 
# See the License for the", "self.op_type = \"roi_pool\" self.set_data() def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'],", "= int(max(roi_end_h - roi_start_h + 1, 1)) roi_width = int(max(roi_end_w", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "= min(max(hend + roi_start_h, 0), self.height) wstart = min(max(wstart +", "wend = int(math.ceil((pw + 1) * bin_size_w)) hstart = min(max(hstart", "paddle.compat as cpt from op_test import OpTest class TestROIPoolOp(OpTest): def", "c, ph, pw] = h * self.width + w self.outs", "= out_data.astype('float32') self.argmaxes = argmax_data.astype('int64') def make_rois(self): rois = []", "x2 = np.random.random_integers(x1 + self.pooled_width, self.width // self.spatial_scale) y2 =", "n, c, h, w self.x_dim = (self.batch_size, self.channels, self.height, self.width)", "+ 1) * bin_size_h)) wend = int(math.ceil((pw + 1) *", "[bno, x1, y1, x2, y2] rois.append(roi) self.rois_num = len(rois) self.rois", "int(math.ceil((ph + 1) * bin_size_h)) wend = int(math.ceil((pw + 1)", "License. 
from __future__ import print_function import unittest import numpy as", "= -sys.float_info.max argmax_data[i, c, ph, pw] = -1 for h", "self.width // self.spatial_scale - self.pooled_width) y1 = np.random.random_integers( 0, self.height", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "if x_i[c, h, w] > out_data[i, c, ph, pw]: out_data[i,", "= int(math.ceil((pw + 1) * bin_size_w)) hstart = min(max(hstart +", "def setUp(self): self.op_type = \"roi_pool\" self.set_data() def test_check_output(self): self.check_output() def", "h * self.width + w self.outs = out_data.astype('float32') self.argmaxes =", "\"License\"); # you may not use this file except in", "def calc_roi_pool(self): out_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) argmax_data =", "from __future__ import print_function import unittest import numpy as np", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "roi_start_w = int(cpt.round(roi[1] * self.spatial_scale)) roi_start_h = int(cpt.round(roi[2] * self.spatial_scale))", "= np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) argmax_data = np.zeros((self.rois_num, self.channels, self.pooled_height,", "# distributed under the License is distributed on an \"AS", "c in range(self.channels): for ph in range(self.pooled_height): for pw in", "* self.spatial_scale)) roi_end_w = int(cpt.round(roi[3] * self.spatial_scale)) roi_end_h = int(cpt.round(roi[4]", "# Unless required by applicable law or agreed to in", "self.calc_roi_pool() self.inputs = {'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)} self.attrs", "= (self.batch_size, self.channels, self.height, self.width) self.spatial_scale = 1.0 / 4.0", "h, w] argmax_data[i, c, ph, pw] = h * self.width", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "for i in range(bno + 1): x1 = np.random.random_integers( 0,", "-1 for h in range(hstart, hend): for w in range(wstart,", "= 
np.array(rois).astype(\"int64\") def setUp(self): self.op_type = \"roi_pool\" self.set_data() def test_check_output(self):", "self.pooled_width, self.width // self.spatial_scale) y2 = np.random.random_integers( y1 + self.pooled_height,", "You may obtain a copy of the License at #", "= [] self.rois_lod = [[]] for bno in range(self.batch_size): self.rois_lod[0].append(bno", "= [[]] for bno in range(self.batch_size): self.rois_lod[0].append(bno + 1) for", "np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self): out_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) argmax_data", "the Apache License, Version 2.0 (the \"License\"); # you may", "i in range(bno + 1): x1 = np.random.random_integers( 0, self.width", "np.random.random_integers( y1 + self.pooled_height, self.height // self.spatial_scale) roi = [bno,", "int(max(roi_end_w - roi_start_w + 1, 1)) x_i = self.x[roi_batch_id] bin_size_h", "- self.pooled_width) y1 = np.random.random_integers( 0, self.height // self.spatial_scale -" ]
[ "import * from test_whole_flow import * from test_handlers_metaclass_magic import *", "from test_handlers_paginate import * from test_handlers_paginate_data import * from test_handlers_inject_data_hook", "from test_handlers_post import * from test_handlers_put import * from test_handlers_delete", "from test_handlers_metaclass_magic import * from test_handlers_serialize_to_python import * from test_handlers_is_method_allowed", "test_handlers_order_data import * from test_handlers_paginate import * from test_handlers_paginate_data import", "import * from test_handlers_is_method_allowed import * from test_handlers_data_control import *", "* from test_handlers_serialize_to_python import * from test_handlers_is_method_allowed import * from", "* from test_serializers import * from test_deserializers import * from", "test_handlers_is_method_allowed import * from test_handlers_data_control import * from test_handlers_package import", "import * from test_handlers_paginate_data import * from test_handlers_inject_data_hook import *", "test_handlers_cleanse_body import * from test_handlers_validate import * from test_handlers_clean_models import", "test_handlers_clean_models import * from test_handlers_get import * from test_handlers_is_catastrophic import", "* from test_authentication import * from test_whole_flow import * from", "from test_exceptions import * from test_authentication import * from test_whole_flow", "test_handlers_validate import * from test_handlers_clean_models import * from test_handlers_get import", "test_handlers_serialize_to_python import * from test_handlers_is_method_allowed import * from test_handlers_data_control import", "test_handlers_finalize_pending import * from test_handlers_cleanse_body import * from test_handlers_validate import", "* from test_handlers_is_catastrophic import * from test_handlers_post import * from", "import * from test_serializers import * from test_deserializers import *", "import * from test_exceptions import * from 
test_authentication import *", "* from test_handlers_finalize_pending import * from test_handlers_cleanse_body import * from", "* from test_exceptions import * from test_authentication import * from", "* from test_handlers_get import * from test_handlers_is_catastrophic import * from", "* from test_handlers_order import * from test_handlers_order_data import * from", "import * from test_handlers_finalize_pending import * from test_handlers_cleanse_body import *", "import * from test_handlers_is_catastrophic import * from test_handlers_post import *", "import * from test_handlers_validate import * from test_handlers_clean_models import *", "* from test_handlers_validate import * from test_handlers_clean_models import * from", "* from test_whole_flow import * from test_handlers_metaclass_magic import * from", "* from test_handlers_data_control import * from test_handlers_package import * from", "* from test_handlers_package import * from test_handlers_finalize_pending import * from", "import * from test_handlers_put import * from test_handlers_delete import *", "* from test_handlers_put import * from test_handlers_delete import * from", "from test_proxy import * from test_serializers import * from test_deserializers", "test_handlers_patch_response import * from test_handlers_authentication_hook import * from test_handlers_filter_data import", "test_handlers_package import * from test_handlers_finalize_pending import * from test_handlers_cleanse_body import", "* from test_deserializers import * from test_exceptions import * from", "* from test_handlers_cleanse_body import * from test_handlers_validate import * from", "test_serializers import * from test_deserializers import * from test_exceptions import", "* from test_handlers_authentication_hook import * from test_handlers_filter_data import * from", "* from test_handlers_paginate_data import * from test_handlers_inject_data_hook import * from", "from test_handlers_package import * from test_handlers_finalize_pending 
import * from test_handlers_cleanse_body", "test_handlers_delete import * from test_handlers_patch_response import * from test_handlers_authentication_hook import", "import * from test_handlers_package import * from test_handlers_finalize_pending import *", "import * from test_handlers_post import * from test_handlers_put import *", "from test_authentication import * from test_whole_flow import * from test_handlers_metaclass_magic", "* from test_handlers_filter_data import * from test_handlers_order import * from", "test_handlers_data_control import * from test_handlers_package import * from test_handlers_finalize_pending import", "test_handlers_filter_data import * from test_handlers_order import * from test_handlers_order_data import", "test_handlers_get import * from test_handlers_is_catastrophic import * from test_handlers_post import", "import * from test_handlers_order import * from test_handlers_order_data import *", "* from test_handlers_order_data import * from test_handlers_paginate import * from", "from test_handlers_filter_data import * from test_handlers_order import * from test_handlers_order_data", "* from test_handlers_inject_data_hook import * from test_handlers_handle_exception import * from", "import * from test_deserializers import * from test_exceptions import *", "import * from test_handlers_filter_data import * from test_handlers_order import *", "import * from test_handlers_patch_response import * from test_handlers_authentication_hook import *", "import * from test_handlers_authentication_hook import * from test_handlers_filter_data import *", "import * from test_handlers_order_data import * from test_handlers_paginate import *", "test_proxy import * from test_serializers import * from test_deserializers import", "import * from test_handlers_delete import * from test_handlers_patch_response import *", "import * from test_handlers_paginate import * from test_handlers_paginate_data import *", "test_deserializers import * from test_exceptions 
import * from test_authentication import", "from test_serializers import * from test_deserializers import * from test_exceptions", "from test_handlers_delete import * from test_handlers_patch_response import * from test_handlers_authentication_hook", "from test_handlers_order import * from test_handlers_order_data import * from test_handlers_paginate", "from test_handlers_is_method_allowed import * from test_handlers_data_control import * from test_handlers_package", "from test_handlers_finalize_pending import * from test_handlers_cleanse_body import * from test_handlers_validate", "import * from test_handlers_handle_exception import * from test_handlers_deserialize_body import *", "from test_whole_flow import * from test_handlers_metaclass_magic import * from test_handlers_serialize_to_python", "* from test_handlers_clean_models import * from test_handlers_get import * from", "test_handlers_authentication_hook import * from test_handlers_filter_data import * from test_handlers_order import", "from test_handlers_order_data import * from test_handlers_paginate import * from test_handlers_paginate_data", "* from test_handlers_delete import * from test_handlers_patch_response import * from", "* from test_handlers_is_method_allowed import * from test_handlers_data_control import * from", "test_handlers_put import * from test_handlers_delete import * from test_handlers_patch_response import", "import * from test_authentication import * from test_whole_flow import *", "test_handlers_paginate_data import * from test_handlers_inject_data_hook import * from test_handlers_handle_exception import", "from test_deserializers import * from test_exceptions import * from test_authentication", "from test_handlers_get import * from test_handlers_is_catastrophic import * from test_handlers_post", "import * from test_handlers_clean_models import * from test_handlers_get import *", "* from test_handlers_patch_response import * from test_handlers_authentication_hook import * from", 
"import * from test_handlers_inject_data_hook import * from test_handlers_handle_exception import *", "from test_handlers_paginate_data import * from test_handlers_inject_data_hook import * from test_handlers_handle_exception", "from test_handlers_cleanse_body import * from test_handlers_validate import * from test_handlers_clean_models", "test_exceptions import * from test_authentication import * from test_whole_flow import", "from test_handlers_inject_data_hook import * from test_handlers_handle_exception import * from test_handlers_deserialize_body", "import * from test_handlers_data_control import * from test_handlers_package import *", "test_handlers_metaclass_magic import * from test_handlers_serialize_to_python import * from test_handlers_is_method_allowed import", "from test_handlers_is_catastrophic import * from test_handlers_post import * from test_handlers_put", "from test_handlers_put import * from test_handlers_delete import * from test_handlers_patch_response", "import * from test_handlers_serialize_to_python import * from test_handlers_is_method_allowed import *", "import * from test_handlers_metaclass_magic import * from test_handlers_serialize_to_python import *", "from test_handlers_patch_response import * from test_handlers_authentication_hook import * from test_handlers_filter_data", "<reponame>movermeyer/django-firestone from test_proxy import * from test_serializers import * from", "from test_handlers_authentication_hook import * from test_handlers_filter_data import * from test_handlers_order", "test_handlers_order import * from test_handlers_order_data import * from test_handlers_paginate import", "test_handlers_inject_data_hook import * from test_handlers_handle_exception import * from test_handlers_deserialize_body import", "test_handlers_is_catastrophic import * from test_handlers_post import * from test_handlers_put import", "* from test_handlers_paginate import * from test_handlers_paginate_data import * from", "from 
test_handlers_serialize_to_python import * from test_handlers_is_method_allowed import * from test_handlers_data_control", "from test_handlers_data_control import * from test_handlers_package import * from test_handlers_finalize_pending", "test_handlers_paginate import * from test_handlers_paginate_data import * from test_handlers_inject_data_hook import", "from test_handlers_validate import * from test_handlers_clean_models import * from test_handlers_get", "test_whole_flow import * from test_handlers_metaclass_magic import * from test_handlers_serialize_to_python import", "import * from test_handlers_get import * from test_handlers_is_catastrophic import *", "import * from test_handlers_cleanse_body import * from test_handlers_validate import *", "* from test_handlers_post import * from test_handlers_put import * from", "test_handlers_post import * from test_handlers_put import * from test_handlers_delete import", "test_authentication import * from test_whole_flow import * from test_handlers_metaclass_magic import", "* from test_handlers_metaclass_magic import * from test_handlers_serialize_to_python import * from", "from test_handlers_clean_models import * from test_handlers_get import * from test_handlers_is_catastrophic" ]
[ "and index_string[511] == '0'): grid = addLayerOnes(grid) output_grid = np.ones((len(grid),len(grid[0])),dtype=int)", "= False index_string = '' grid = [] for i", "= np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid =", "= [] for i in input_array: if i == '':", "or (iter % 2 == 0 and index_string[511] == '0'):", "% 2 == 0 and index_string[511] == '0'): grid =", "for i in input_array: if i == '': splitvalue =", "'1' and index_string [511] == '1') or (iter % 2", "binStr += str(grid[i+k][j+l]) output_grid[i-1][j-1] = index_string[int(binStr,2)] return output_grid #pictureEnhancer(test_array,2) #pictureEnhancer(input_array,2)", "= np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0: grid", "False index_string = '' grid = [] for i in", "<gh_stars>0 import numpy as np raw = open(\"inputs/20.txt\",\"r\").readlines() input_array= [(i.replace('\\n',", "j in range(1, len(grid[i])-1): binStr = '' for k in", "#if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if", "'1')) for i in test_raw] def addLayerZero(grid): #if sum(np.asarray(grid)[:,0]) >", "pixels is:', sum(sum(grid))) def enhancer(grid, index_string,iter): print(iter) if iter ==", "= addLayerZero(grid) output_grid = np.zeros((len(grid),len(grid[0])),dtype=int) grid = addLayerZero(grid) elif (index_string[0]", "> 0: grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) >", "i in raw] test_raw = open(\"inputs/20_test.txt\",\"r\").readlines() test_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1'))", "# if sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:]))", "return grid def pictureEnhancer(input_array,iter): splitvalue = False index_string = ''", "index_string = '' grid = [] for i in input_array:", "range(1, len(grid[i])-1): binStr = 
'' for k in range(-1,2): for", "= enhancer(grid, index_string,x) print('The number of lit pixels is:', sum(sum(grid)))", "[511] == '1') or (iter % 2 == 0 and", "= np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid =", "grid = np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0:", "> 0: grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) >", "(index_string[0] == '1' and index_string [511] == '1') or (iter", "test_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in test_raw] def addLayerZero(grid):", "enhancer(grid, index_string,iter): print(iter) if iter == 1 or index_string[0] ==", "open(\"inputs/20_test.txt\",\"r\").readlines() test_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in test_raw] def", "'': splitvalue = True continue if not splitvalue: index_string +=", "> 0: grid = np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) >", "lit pixels is:', sum(sum(grid))) def enhancer(grid, index_string,iter): print(iter) if iter", "or index_string[0] == '0' or (iter % 2 == 1", "input_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in raw] test_raw =", "index_string [511] == '1') or (iter % 2 == 0", "if i == '': splitvalue = True continue if not", "i in row] for row in grid] for x in", "np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.ones(len(grid[0]),", "grid = addLayerOnes(grid) output_grid = np.ones((len(grid),len(grid[0])),dtype=int) grid = addLayerOnes(grid) for", "for j in range(1, len(grid[i])-1): binStr = '' for k", "in range(-1,2): for l in range(-1,2): binStr += str(grid[i+k][j+l]) output_grid[i-1][j-1]", "numpy as np raw = open(\"inputs/20.txt\",\"r\").readlines() input_array= [(i.replace('\\n', 
'').replace('.','0').replace('#', '1'))", "in test_raw] def addLayerZero(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid =", "dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid))", "grid = np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid", "index_string[511] == '0'): grid = addLayerZero(grid) output_grid = np.zeros((len(grid),len(grid[0])),dtype=int) grid", "raw = open(\"inputs/20.txt\",\"r\").readlines() input_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in", "open(\"inputs/20.txt\",\"r\").readlines() input_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in raw] test_raw", "np.zeros((len(grid),len(grid[0])),dtype=int) grid = addLayerZero(grid) elif (index_string[0] == '1' and index_string", "= '' grid = [] for i in input_array: if", "sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1])", "index_string[0] == '0' or (iter % 2 == 1 and", "= addLayerZero(grid) elif (index_string[0] == '1' and index_string [511] ==", "sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1])", "= np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def addLayerOnes(grid): #if sum(np.asarray(grid)[:,0])", "= open(\"inputs/20.txt\",\"r\").readlines() input_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in raw]", "np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.ones(len(grid[0]),", "row in grid] for x in range(1,iter+1): grid = enhancer(grid,", "'0' or (iter % 2 == 1 and index_string[511] ==", "np raw = open(\"inputs/20.txt\",\"r\").readlines() input_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i", "k in range(-1,2): for l in 
range(-1,2): binStr += str(grid[i+k][j+l])", "0 and index_string[511] == '0'): grid = addLayerOnes(grid) output_grid =", "grid = np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid", "dtype=int)[np.newaxis,:])) return grid def addLayerOnes(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid", "splitvalue = False index_string = '' grid = [] for", "for row in grid] for x in range(1,iter+1): grid =", "== '0'): grid = addLayerZero(grid) output_grid = np.zeros((len(grid),len(grid[0])),dtype=int) grid =", "sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:])) return grid", "0: grid = np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def pictureEnhancer(input_array,iter):", "= np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def pictureEnhancer(input_array,iter): splitvalue =", "iter == 1 or index_string[0] == '0' or (iter %", "range(1,len(grid)-1): for j in range(1, len(grid[i])-1): binStr = '' for", "enhancer(grid, index_string,x) print('The number of lit pixels is:', sum(sum(grid))) def", "grid = np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def pictureEnhancer(input_array,iter): splitvalue", "sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:])", "grid = np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def addLayerOnes(grid): #if", "for x in range(1,iter+1): grid = enhancer(grid, index_string,x) print('The number", "'1') or (iter % 2 == 0 and index_string[511] ==", "1 and index_string[511] == '0'): grid = addLayerZero(grid) output_grid =", "[(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in test_raw] def addLayerZero(grid): #if", "2 == 0 and index_string[511] == '0'): grid = addLayerOnes(grid)", "== '0'): grid = addLayerOnes(grid) output_grid = 
np.ones((len(grid),len(grid[0])),dtype=int) grid =", "> 0: grid = np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) >", "grid = [[int(i) for i in row] for row in", "np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0: grid =", "np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def pictureEnhancer(input_array,iter): splitvalue = False", "np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.zeros(len(grid[0]),", "for i in row] for row in grid] for x", "grid = addLayerZero(grid) output_grid = np.zeros((len(grid),len(grid[0])),dtype=int) grid = addLayerZero(grid) elif", "splitvalue: index_string += i else: grid.append(list(i)) grid = [[int(i) for", "sum(sum(grid))) def enhancer(grid, index_string,iter): print(iter) if iter == 1 or", "# if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis]))", "0: grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0:", "'0'): grid = addLayerZero(grid) output_grid = np.zeros((len(grid),len(grid[0])),dtype=int) grid = addLayerZero(grid)", "and index_string[511] == '0'): grid = addLayerZero(grid) output_grid = np.zeros((len(grid),len(grid[0])),dtype=int)", "not splitvalue: index_string += i else: grid.append(list(i)) grid = [[int(i)", "= addLayerOnes(grid) for i in range(1,len(grid)-1): for j in range(1,", "0: grid = np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def addLayerOnes(grid):", "= True continue if not splitvalue: index_string += i else:", "output_grid = np.ones((len(grid),len(grid[0])),dtype=int) grid = addLayerOnes(grid) for i in range(1,len(grid)-1):", "sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis])) # if", "grid = addLayerOnes(grid) for i in range(1,len(grid)-1): for j in", "True continue 
if not splitvalue: index_string += i else: grid.append(list(i))", "0: grid = np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0:", "and index_string [511] == '1') or (iter % 2 ==", "for l in range(-1,2): binStr += str(grid[i+k][j+l]) output_grid[i-1][j-1] = index_string[int(binStr,2)]", "of lit pixels is:', sum(sum(grid))) def enhancer(grid, index_string,iter): print(iter) if", "in raw] test_raw = open(\"inputs/20_test.txt\",\"r\").readlines() test_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for", "np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0: grid =", "'').replace('.','0').replace('#', '1')) for i in test_raw] def addLayerZero(grid): #if sum(np.asarray(grid)[:,0])", "i == '': splitvalue = True continue if not splitvalue:", "grid def addLayerOnes(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.ones(len(grid),", "def enhancer(grid, index_string,iter): print(iter) if iter == 1 or index_string[0]", "grid def pictureEnhancer(input_array,iter): splitvalue = False index_string = '' grid", "i in range(1,len(grid)-1): for j in range(1, len(grid[i])-1): binStr =", "sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis])) # if", "in range(1, len(grid[i])-1): binStr = '' for k in range(-1,2):", "if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis])) #", "> 0: grid = np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def", "return grid def addLayerOnes(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid =", "#if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid)) #if", "index_string,iter): print(iter) if iter == 1 or index_string[0] == '0'", "== '': splitvalue = True continue if not splitvalue: index_string", "grid = [] for i in input_array: if i ==", "[] for i in 
input_array: if i == '': splitvalue", "splitvalue = True continue if not splitvalue: index_string += i", "(iter % 2 == 0 and index_string[511] == '0'): grid", "str(grid[i+k][j+l]) output_grid[i-1][j-1] = index_string[int(binStr,2)] return output_grid #pictureEnhancer(test_array,2) #pictureEnhancer(input_array,2) pictureEnhancer(test_array,50) pictureEnhancer(input_array,50)", "> 0: grid = np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:])", "print(iter) if iter == 1 or index_string[0] == '0' or", "np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid)) #", "0: grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0:", "i in input_array: if i == '': splitvalue = True", "in range(1,iter+1): grid = enhancer(grid, index_string,x) print('The number of lit", "i in test_raw] def addLayerZero(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid", "+= i else: grid.append(list(i)) grid = [[int(i) for i in", "if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis])) #", "def pictureEnhancer(input_array,iter): splitvalue = False index_string = '' grid =", "in input_array: if i == '': splitvalue = True continue", "dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.ones(len(grid), dtype=int)[:,", "== 1 or index_string[0] == '0' or (iter % 2", "# if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis]))", "grid] for x in range(1,iter+1): grid = enhancer(grid, index_string,x) print('The", "elif (index_string[0] == '1' and index_string [511] == '1') or", "dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.zeros(len(grid), dtype=int)[:,", "2 == 1 and index_string[511] == '0'): grid = addLayerZero(grid)", "in grid] for x in 
range(1,iter+1): grid = enhancer(grid, index_string,x)", "input_array: if i == '': splitvalue = True continue if", "len(grid[i])-1): binStr = '' for k in range(-1,2): for l", "binStr = '' for k in range(-1,2): for l in", "= np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0: grid", "import numpy as np raw = open(\"inputs/20.txt\",\"r\").readlines() input_array= [(i.replace('\\n', '').replace('.','0').replace('#',", "1 or index_string[0] == '0' or (iter % 2 ==", "np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.zeros(len(grid),", "print('The number of lit pixels is:', sum(sum(grid))) def enhancer(grid, index_string,iter):", "dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid))", "for i in raw] test_raw = open(\"inputs/20_test.txt\",\"r\").readlines() test_array= [(i.replace('\\n', '').replace('.','0').replace('#',", "= open(\"inputs/20_test.txt\",\"r\").readlines() test_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in test_raw]", "addLayerZero(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid))", "sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:])) return grid", "% 2 == 1 and index_string[511] == '0'): grid =", "range(-1,2): binStr += str(grid[i+k][j+l]) output_grid[i-1][j-1] = index_string[int(binStr,2)] return output_grid #pictureEnhancer(test_array,2)", "in range(1,len(grid)-1): for j in range(1, len(grid[i])-1): binStr = ''", "grid.append(list(i)) grid = [[int(i) for i in row] for row", "is:', sum(sum(grid))) def enhancer(grid, index_string,iter): print(iter) if iter == 1", "# if sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:]))", "= addLayerOnes(grid) 
output_grid = np.ones((len(grid),len(grid[0])),dtype=int) grid = addLayerOnes(grid) for i", "range(1,iter+1): grid = enhancer(grid, index_string,x) print('The number of lit pixels", "else: grid.append(list(i)) grid = [[int(i) for i in row] for", "#if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid)) #if", "> 0: grid = np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:])", "== '0' or (iter % 2 == 1 and index_string[511]", "== '1') or (iter % 2 == 0 and index_string[511]", "np.ones((len(grid),len(grid[0])),dtype=int) grid = addLayerOnes(grid) for i in range(1,len(grid)-1): for j", "np.ones(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def pictureEnhancer(input_array,iter): splitvalue = False index_string", "0: grid = np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) >", "= np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid =", "number of lit pixels is:', sum(sum(grid))) def enhancer(grid, index_string,iter): print(iter)", "= np.zeros((len(grid),len(grid[0])),dtype=int) grid = addLayerZero(grid) elif (index_string[0] == '1' and", "'' grid = [] for i in input_array: if i", "== 0 and index_string[511] == '0'): grid = addLayerOnes(grid) output_grid", "> 0: grid = np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def", "+= str(grid[i+k][j+l]) output_grid[i-1][j-1] = index_string[int(binStr,2)] return output_grid #pictureEnhancer(test_array,2) #pictureEnhancer(input_array,2) pictureEnhancer(test_array,50)", "for k in range(-1,2): for l in range(-1,2): binStr +=", "'' for k in range(-1,2): for l in range(-1,2): binStr", "= '' for k in range(-1,2): for l in range(-1,2):", "for i in range(1,len(grid)-1): for j in range(1, len(grid[i])-1): binStr", "as np raw = open(\"inputs/20.txt\",\"r\").readlines() input_array= [(i.replace('\\n', 
'').replace('.','0').replace('#', '1')) for", "if iter == 1 or index_string[0] == '0' or (iter", "raw] test_raw = open(\"inputs/20_test.txt\",\"r\").readlines() test_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i", "np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.ones(len(grid),", "= [[int(i) for i in row] for row in grid]", "def addLayerOnes(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.ones(len(grid), dtype=int)[:,", "continue if not splitvalue: index_string += i else: grid.append(list(i)) grid", "= np.ones((len(grid),len(grid[0])),dtype=int) grid = addLayerOnes(grid) for i in range(1,len(grid)-1): for", "#if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if", "0: grid = np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0:", "'').replace('.','0').replace('#', '1')) for i in raw] test_raw = open(\"inputs/20_test.txt\",\"r\").readlines() test_array=", "for i in test_raw] def addLayerZero(grid): #if sum(np.asarray(grid)[:,0]) > 0:", "dtype=int)[np.newaxis,:])) return grid def pictureEnhancer(input_array,iter): splitvalue = False index_string =", "pictureEnhancer(input_array,iter): splitvalue = False index_string = '' grid = []", "x in range(1,iter+1): grid = enhancer(grid, index_string,x) print('The number of", "grid = enhancer(grid, index_string,x) print('The number of lit pixels is:',", "addLayerZero(grid) output_grid = np.zeros((len(grid),len(grid[0])),dtype=int) grid = addLayerZero(grid) elif (index_string[0] ==", "test_raw = open(\"inputs/20_test.txt\",\"r\").readlines() test_array= [(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in", "np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid)) #", "addLayerOnes(grid) output_grid = 
np.ones((len(grid),len(grid[0])),dtype=int) grid = addLayerOnes(grid) for i in", "if sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:])) return", "dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid,", "i else: grid.append(list(i)) grid = [[int(i) for i in row]", "grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid", "grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid", "or (iter % 2 == 1 and index_string[511] == '0'):", "np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def addLayerOnes(grid): #if sum(np.asarray(grid)[:,0]) >", "np.zeros(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def addLayerOnes(grid): #if sum(np.asarray(grid)[:,0]) > 0:", "addLayerOnes(grid) for i in range(1,len(grid)-1): for j in range(1, len(grid[i])-1):", "if sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:])) return", "'0'): grid = addLayerOnes(grid) output_grid = np.ones((len(grid),len(grid[0])),dtype=int) grid = addLayerOnes(grid)", "in row] for row in grid] for x in range(1,iter+1):", "[[int(i) for i in row] for row in grid] for", "== 1 and index_string[511] == '0'): grid = addLayerZero(grid) output_grid", "== '1' and index_string [511] == '1') or (iter %", "def addLayerZero(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.zeros(len(grid), dtype=int)[:,", "grid = np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0:", "addLayerZero(grid) elif (index_string[0] == '1' and index_string [511] == '1')", "test_raw] def addLayerZero(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.zeros(len(grid),", "0: grid = np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) >", "l 
in range(-1,2): binStr += str(grid[i+k][j+l]) output_grid[i-1][j-1] = index_string[int(binStr,2)] return", "in range(-1,2): binStr += str(grid[i+k][j+l]) output_grid[i-1][j-1] = index_string[int(binStr,2)] return output_grid", "(iter % 2 == 1 and index_string[511] == '0'): grid", "row] for row in grid] for x in range(1,iter+1): grid", "[(i.replace('\\n', '').replace('.','0').replace('#', '1')) for i in raw] test_raw = open(\"inputs/20_test.txt\",\"r\").readlines()", "addLayerOnes(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid))", "output_grid = np.zeros((len(grid),len(grid[0])),dtype=int) grid = addLayerZero(grid) elif (index_string[0] == '1'", "if not splitvalue: index_string += i else: grid.append(list(i)) grid =", "sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:])", "index_string,x) print('The number of lit pixels is:', sum(sum(grid))) def enhancer(grid,", "range(-1,2): for l in range(-1,2): binStr += str(grid[i+k][j+l]) output_grid[i-1][j-1] =", "grid = addLayerZero(grid) elif (index_string[0] == '1' and index_string [511]", "index_string[511] == '0'): grid = addLayerOnes(grid) output_grid = np.ones((len(grid),len(grid[0])),dtype=int) grid", "'1')) for i in raw] test_raw = open(\"inputs/20_test.txt\",\"r\").readlines() test_array= [(i.replace('\\n',", "np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.zeros(len(grid[0]),", "= np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid =", "index_string += i else: grid.append(list(i)) grid = [[int(i) for i" ]
[ "QtCore.QEvent.LanguageChange: self.retranslateUi() super(Demo, self).changeEvent(event) def retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello,", "super(Demo, self).__init__() self.button = QtWidgets.QPushButton() self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo =", "self.v_layout.addWidget(self.button) self.v_layout.addWidget(self.label) options = ([('English', ''), ('français', 'eng-fr' ), ('中文',", "'eng-fr' ), ('中文', 'eng-chs'), ]) for i, (text, lang) in", "self.combo.setItemData(i, lang) self.retranslateUi() @QtCore.pyqtSlot(int) def change_func(self, index): data = self.combo.itemData(index)", "sys from PyQt5 import QtCore, QtGui, QtWidgets class Demo(QtWidgets.QWidget): def", "= QtCore.QTranslator(self) self.v_layout = QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button) self.v_layout.addWidget(self.label) options =", "self.combo.currentIndexChanged.connect(self.change_func) self.trans = QtCore.QTranslator(self) self.v_layout = QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button) self.v_layout.addWidget(self.label)", "self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World')) if __name__ == '__main__': app", "), ('中文', 'eng-chs'), ]) for i, (text, lang) in enumerate(options):", "'Hello, World')) if __name__ == '__main__': app = QtWidgets.QApplication(sys.argv) demo", "QtWidgets.QComboBox(self) self.combo.currentIndexChanged.connect(self.change_func) self.trans = QtCore.QTranslator(self) self.v_layout = QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button)", "QtCore.QTranslator(self) self.v_layout = QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button) 
self.v_layout.addWidget(self.label) options = ([('English',", "super(Demo, self).changeEvent(event) def retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World')) if", "= ([('English', ''), ('français', 'eng-fr' ), ('中文', 'eng-chs'), ]) for", "__name__ == '__main__': app = QtWidgets.QApplication(sys.argv) demo = Demo() demo.show()", "i, (text, lang) in enumerate(options): self.combo.addItem(text) self.combo.setItemData(i, lang) self.retranslateUi() @QtCore.pyqtSlot(int)", "self.retranslateUi() super(Demo, self).changeEvent(event) def retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World'))", "= QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button) self.v_layout.addWidget(self.label) options = ([('English', ''), ('français',", "import sys from PyQt5 import QtCore, QtGui, QtWidgets class Demo(QtWidgets.QWidget):", "from PyQt5 import QtCore, QtGui, QtWidgets class Demo(QtWidgets.QWidget): def __init__(self):", "def change_func(self, index): data = self.combo.itemData(index) if data: self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans)", "in enumerate(options): self.combo.addItem(text) self.combo.setItemData(i, lang) self.retranslateUi() @QtCore.pyqtSlot(int) def change_func(self, index):", "import QtCore, QtGui, QtWidgets class Demo(QtWidgets.QWidget): def __init__(self): super(Demo, self).__init__()", "QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo = QtWidgets.QComboBox(self) self.combo.currentIndexChanged.connect(self.change_func) self.trans = QtCore.QTranslator(self) self.v_layout =", "lang) in enumerate(options): self.combo.addItem(text) self.combo.setItemData(i, lang) self.retranslateUi() @QtCore.pyqtSlot(int) def change_func(self,", "]) for i, (text, 
lang) in enumerate(options): self.combo.addItem(text) self.combo.setItemData(i, lang)", "event): if event.type() == QtCore.QEvent.LanguageChange: self.retranslateUi() super(Demo, self).changeEvent(event) def retranslateUi(self):", "def changeEvent(self, event): if event.type() == QtCore.QEvent.LanguageChange: self.retranslateUi() super(Demo, self).changeEvent(event)", "QtWidgets class Demo(QtWidgets.QWidget): def __init__(self): super(Demo, self).__init__() self.button = QtWidgets.QPushButton()", "def __init__(self): super(Demo, self).__init__() self.button = QtWidgets.QPushButton() self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter)", "if event.type() == QtCore.QEvent.LanguageChange: self.retranslateUi() super(Demo, self).changeEvent(event) def retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo',", "change_func(self, index): data = self.combo.itemData(index) if data: self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans) else:", "self.combo = QtWidgets.QComboBox(self) self.combo.currentIndexChanged.connect(self.change_func) self.trans = QtCore.QTranslator(self) self.v_layout = QtWidgets.QVBoxLayout(self)", "= self.combo.itemData(index) if data: self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans) else: QtWidgets.QApplication.instance().removeTranslator(self.trans) def changeEvent(self,", "Demo(QtWidgets.QWidget): def __init__(self): super(Demo, self).__init__() self.button = QtWidgets.QPushButton() self.label =", "changeEvent(self, event): if event.type() == QtCore.QEvent.LanguageChange: self.retranslateUi() super(Demo, self).changeEvent(event) def", "self).changeEvent(event) def retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World')) if __name__", "= QtWidgets.QPushButton() self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo = 
QtWidgets.QComboBox(self) self.combo.currentIndexChanged.connect(self.change_func) self.trans", "def retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World')) if __name__ ==", "''), ('français', 'eng-fr' ), ('中文', 'eng-chs'), ]) for i, (text,", "self.combo.itemData(index) if data: self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans) else: QtWidgets.QApplication.instance().removeTranslator(self.trans) def changeEvent(self, event):", "class Demo(QtWidgets.QWidget): def __init__(self): super(Demo, self).__init__() self.button = QtWidgets.QPushButton() self.label", "= QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo = QtWidgets.QComboBox(self) self.combo.currentIndexChanged.connect(self.change_func) self.trans = QtCore.QTranslator(self) self.v_layout", "('français', 'eng-fr' ), ('中文', 'eng-chs'), ]) for i, (text, lang)", "for i, (text, lang) in enumerate(options): self.combo.addItem(text) self.combo.setItemData(i, lang) self.retranslateUi()", "(text, lang) in enumerate(options): self.combo.addItem(text) self.combo.setItemData(i, lang) self.retranslateUi() @QtCore.pyqtSlot(int) def", "data = self.combo.itemData(index) if data: self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans) else: QtWidgets.QApplication.instance().removeTranslator(self.trans) def", "enumerate(options): self.combo.addItem(text) self.combo.setItemData(i, lang) self.retranslateUi() @QtCore.pyqtSlot(int) def change_func(self, index): data", "self.v_layout = QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button) self.v_layout.addWidget(self.label) options = ([('English', ''),", "QtWidgets.QApplication.instance().installTranslator(self.trans) else: QtWidgets.QApplication.instance().removeTranslator(self.trans) def changeEvent(self, event): if event.type() == 
QtCore.QEvent.LanguageChange:", "<gh_stars>100-1000 import sys from PyQt5 import QtCore, QtGui, QtWidgets class", "QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button) self.v_layout.addWidget(self.label) options = ([('English', ''), ('français', 'eng-fr'", "@QtCore.pyqtSlot(int) def change_func(self, index): data = self.combo.itemData(index) if data: self.trans.load(data)", "QtCore, QtGui, QtWidgets class Demo(QtWidgets.QWidget): def __init__(self): super(Demo, self).__init__() self.button", "self.button = QtWidgets.QPushButton() self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo = QtWidgets.QComboBox(self) self.combo.currentIndexChanged.connect(self.change_func)", "self.retranslateUi() @QtCore.pyqtSlot(int) def change_func(self, index): data = self.combo.itemData(index) if data:", "self.combo.addItem(text) self.combo.setItemData(i, lang) self.retranslateUi() @QtCore.pyqtSlot(int) def change_func(self, index): data =", "else: QtWidgets.QApplication.instance().removeTranslator(self.trans) def changeEvent(self, event): if event.type() == QtCore.QEvent.LanguageChange: self.retranslateUi()", "self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World')) if __name__ == '__main__': app = QtWidgets.QApplication(sys.argv)", "QtGui, QtWidgets class Demo(QtWidgets.QWidget): def __init__(self): super(Demo, self).__init__() self.button =", "if __name__ == '__main__': app = QtWidgets.QApplication(sys.argv) demo = Demo()", "lang) self.retranslateUi() @QtCore.pyqtSlot(int) def change_func(self, index): data = self.combo.itemData(index) if", "if data: self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans) else: QtWidgets.QApplication.instance().removeTranslator(self.trans) def changeEvent(self, event): if", "== '__main__': app = QtWidgets.QApplication(sys.argv) demo = Demo() demo.show() sys.exit(app.exec_())", "= QtWidgets.QComboBox(self) 
self.combo.currentIndexChanged.connect(self.change_func) self.trans = QtCore.QTranslator(self) self.v_layout = QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo)", "event.type() == QtCore.QEvent.LanguageChange: self.retranslateUi() super(Demo, self).changeEvent(event) def retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start'))", "__init__(self): super(Demo, self).__init__() self.button = QtWidgets.QPushButton() self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo", "options = ([('English', ''), ('français', 'eng-fr' ), ('中文', 'eng-chs'), ])", "self).__init__() self.button = QtWidgets.QPushButton() self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo = QtWidgets.QComboBox(self)", "World')) if __name__ == '__main__': app = QtWidgets.QApplication(sys.argv) demo =", "self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button) self.v_layout.addWidget(self.label) options = ([('English', ''), ('français', 'eng-fr' ),", "self.trans = QtCore.QTranslator(self) self.v_layout = QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button) self.v_layout.addWidget(self.label) options", "data: self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans) else: QtWidgets.QApplication.instance().removeTranslator(self.trans) def changeEvent(self, event): if event.type()", "QtWidgets.QPushButton() self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo = QtWidgets.QComboBox(self) self.combo.currentIndexChanged.connect(self.change_func) self.trans =", "index): data = self.combo.itemData(index) if data: self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans) else: QtWidgets.QApplication.instance().removeTranslator(self.trans)", "self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans) else: 
QtWidgets.QApplication.instance().removeTranslator(self.trans) def changeEvent(self, event): if event.type() ==", "'eng-chs'), ]) for i, (text, lang) in enumerate(options): self.combo.addItem(text) self.combo.setItemData(i,", "'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World')) if __name__ == '__main__': app =", "('中文', 'eng-chs'), ]) for i, (text, lang) in enumerate(options): self.combo.addItem(text)", "== QtCore.QEvent.LanguageChange: self.retranslateUi() super(Demo, self).changeEvent(event) def retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo',", "self.v_layout.addWidget(self.label) options = ([('English', ''), ('français', 'eng-fr' ), ('中文', 'eng-chs'),", "retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World')) if __name__ == '__main__':", "self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo = QtWidgets.QComboBox(self) self.combo.currentIndexChanged.connect(self.change_func) self.trans = QtCore.QTranslator(self)", "QtWidgets.QApplication.instance().removeTranslator(self.trans) def changeEvent(self, event): if event.type() == QtCore.QEvent.LanguageChange: self.retranslateUi() super(Demo,", "([('English', ''), ('français', 'eng-fr' ), ('中文', 'eng-chs'), ]) for i,", "PyQt5 import QtCore, QtGui, QtWidgets class Demo(QtWidgets.QWidget): def __init__(self): super(Demo," ]
[ "} } } } } def test_http_code_from_handler(app_with_raises_and_handler, client): response =", "test_error_descriptions_from_raises(app_with_raises, client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"500\":", "description\", \"content\": { \"application/json\": { \"schema\": { \"$ref\": \"#/components/schemas/ErrorResponse\" }", "} } def test_http_code_from_handler(app_with_raises_and_handler, client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"]", "client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"500\": {", "Something something. :raises KeyError: KeyError description \"\"\" oapi.init_app(app) def test_error_descriptions_from_raises(app_with_raises,", "def test_http_code_from_handler(app_with_raises_and_handler, client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == {", "app_with_raises_and_handler(app): oapi = FlaskApistrap() oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse()) @app.route(\"/\",", "description \"\"\" oapi.init_app(app) @pytest.fixture() def app_with_raises_and_handler(app): oapi = FlaskApistrap() oapi.add_error_handler(KeyError,", "\"$ref\": \"#/components/schemas/ErrorResponse\" } } } } } def test_http_code_from_handler(app_with_raises_and_handler, client):", "== { \"515\": { \"description\": \"KeyError description\", \"content\": { \"application/json\":", "import ErrorResponse @pytest.fixture() def app_with_raises(app): oapi = FlaskApistrap() @app.route(\"/\", methods=[\"GET\"])", "apistrap.flask import FlaskApistrap from apistrap.schemas import ErrorResponse @pytest.fixture() def app_with_raises(app):", "{ \"description\": \"KeyError description\", \"content\": { \"application/json\": { \"schema\": {", "KeyError: KeyError description \"\"\" oapi.init_app(app) def 
test_error_descriptions_from_raises(app_with_raises, client): response =", "oapi.init_app(app) @pytest.fixture() def app_with_raises_and_handler(app): oapi = FlaskApistrap() oapi.add_error_handler(KeyError, 515, lambda", "{ \"515\": { \"description\": \"KeyError description\", \"content\": { \"application/json\": {", "def app_with_raises_and_handler(app): oapi = FlaskApistrap() oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse())", "@pytest.fixture() def app_with_raises(app): oapi = FlaskApistrap() @app.route(\"/\", methods=[\"GET\"]) def view():", "FlaskApistrap from apistrap.schemas import ErrorResponse @pytest.fixture() def app_with_raises(app): oapi =", "KeyError description \"\"\" oapi.init_app(app) @pytest.fixture() def app_with_raises_and_handler(app): oapi = FlaskApistrap()", "FlaskApistrap() oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse()) @app.route(\"/\", methods=[\"GET\"]) def view():", "from apistrap.flask import FlaskApistrap from apistrap.schemas import ErrorResponse @pytest.fixture() def", ":raises KeyError: KeyError description \"\"\" oapi.init_app(app) def test_error_descriptions_from_raises(app_with_raises, client): response", "FlaskApistrap() @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\" Something something. :raises KeyError:", "oapi = FlaskApistrap() @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\" Something something.", "e: ErrorResponse()) @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\" Something something. 
:raises", "from apistrap.schemas import ErrorResponse @pytest.fixture() def app_with_raises(app): oapi = FlaskApistrap()", "response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"500\": { \"description\":", "== { \"500\": { \"description\": \"KeyError description\", \"content\": { \"application/json\":", "\"content\": { \"application/json\": { \"schema\": { \"$ref\": \"#/components/schemas/ErrorResponse\" } }", "ErrorResponse @pytest.fixture() def app_with_raises(app): oapi = FlaskApistrap() @app.route(\"/\", methods=[\"GET\"]) def", "ErrorResponse()) @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\" Something something. :raises KeyError:", "\"\"\" Something something. :raises KeyError: KeyError description \"\"\" oapi.init_app(app) @pytest.fixture()", "\"500\": { \"description\": \"KeyError description\", \"content\": { \"application/json\": { \"schema\":", "test_http_code_from_handler(app_with_raises_and_handler, client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"515\":", "response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"515\": { \"description\": \"KeyError description\", \"content\": {", "} def test_http_code_from_handler(app_with_raises_and_handler, client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] ==", "client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"515\": { \"description\": \"KeyError description\",", "oapi = FlaskApistrap() oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse()) @app.route(\"/\", methods=[\"GET\"])", "{ \"application/json\": { \"schema\": { \"$ref\": \"#/components/schemas/ErrorResponse\" } } }", "\"schema\": { \"$ref\": \"#/components/schemas/ErrorResponse\" } } } } } def", "pytest from apistrap.flask import FlaskApistrap from apistrap.schemas import ErrorResponse @pytest.fixture()", 
"515, lambda e: ErrorResponse()) @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\" Something", "view(): \"\"\" Something something. :raises KeyError: KeyError description \"\"\" oapi.init_app(app)", "import pytest from apistrap.flask import FlaskApistrap from apistrap.schemas import ErrorResponse", "\"\"\" oapi.init_app(app) @pytest.fixture() def app_with_raises_and_handler(app): oapi = FlaskApistrap() oapi.add_error_handler(KeyError, 515,", "def test_error_descriptions_from_raises(app_with_raises, client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == {", "= client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"500\": { \"description\": \"KeyError", "client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"515\": {", "{ \"500\": { \"description\": \"KeyError description\", \"content\": { \"application/json\": {", ":raises KeyError: KeyError description \"\"\" oapi.init_app(app) @pytest.fixture() def app_with_raises_and_handler(app): oapi", "def view(): \"\"\" Something something. 
:raises KeyError: KeyError description \"\"\"", "} } } def test_http_code_from_handler(app_with_raises_and_handler, client): response = client.get(\"/spec.json\") assert", "\"application/json\": { \"schema\": { \"$ref\": \"#/components/schemas/ErrorResponse\" } } } }", "response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"515\": { \"description\":", "{ \"$ref\": \"#/components/schemas/ErrorResponse\" } } } } } def test_http_code_from_handler(app_with_raises_and_handler,", "oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse()) @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\"", "\"#/components/schemas/ErrorResponse\" } } } } } def test_http_code_from_handler(app_with_raises_and_handler, client): response", "response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"500\": { \"description\": \"KeyError description\", \"content\": {", "\"\"\" Something something. :raises KeyError: KeyError description \"\"\" oapi.init_app(app) def", "something. :raises KeyError: KeyError description \"\"\" oapi.init_app(app) def test_error_descriptions_from_raises(app_with_raises, client):", "= client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"515\": { \"description\": \"KeyError", "client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"500\": { \"description\": \"KeyError description\",", "def app_with_raises(app): oapi = FlaskApistrap() @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\"", "something. 
:raises KeyError: KeyError description \"\"\" oapi.init_app(app) @pytest.fixture() def app_with_raises_and_handler(app):", "\"description\": \"KeyError description\", \"content\": { \"application/json\": { \"schema\": { \"$ref\":", "apistrap.schemas import ErrorResponse @pytest.fixture() def app_with_raises(app): oapi = FlaskApistrap() @app.route(\"/\",", "@app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\" Something something. :raises KeyError: KeyError", "description \"\"\" oapi.init_app(app) def test_error_descriptions_from_raises(app_with_raises, client): response = client.get(\"/spec.json\") assert", "\"\"\" oapi.init_app(app) def test_error_descriptions_from_raises(app_with_raises, client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"]", "app_with_raises(app): oapi = FlaskApistrap() @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\" Something", "lambda e: ErrorResponse()) @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\" Something something.", "@pytest.fixture() def app_with_raises_and_handler(app): oapi = FlaskApistrap() oapi.add_error_handler(KeyError, 515, lambda e:", "= FlaskApistrap() oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse()) @app.route(\"/\", methods=[\"GET\"]) def", "\"515\": { \"description\": \"KeyError description\", \"content\": { \"application/json\": { \"schema\":", "= FlaskApistrap() @app.route(\"/\", methods=[\"GET\"]) def view(): \"\"\" Something something. :raises", "assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"515\": { \"description\": \"KeyError description\", \"content\":", "\"KeyError description\", \"content\": { \"application/json\": { \"schema\": { \"$ref\": \"#/components/schemas/ErrorResponse\"", "oapi.init_app(app) def test_error_descriptions_from_raises(app_with_raises, client): response = client.get(\"/spec.json\") assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] ==", "Something something. 
:raises KeyError: KeyError description \"\"\" oapi.init_app(app) @pytest.fixture() def", "assert response.json[\"paths\"][\"/\"][\"get\"][\"responses\"] == { \"500\": { \"description\": \"KeyError description\", \"content\":", "KeyError: KeyError description \"\"\" oapi.init_app(app) @pytest.fixture() def app_with_raises_and_handler(app): oapi =", "import FlaskApistrap from apistrap.schemas import ErrorResponse @pytest.fixture() def app_with_raises(app): oapi", "{ \"schema\": { \"$ref\": \"#/components/schemas/ErrorResponse\" } } } } }", "} } } } def test_http_code_from_handler(app_with_raises_and_handler, client): response = client.get(\"/spec.json\")", "methods=[\"GET\"]) def view(): \"\"\" Something something. :raises KeyError: KeyError description", "KeyError description \"\"\" oapi.init_app(app) def test_error_descriptions_from_raises(app_with_raises, client): response = client.get(\"/spec.json\")" ]
[ "projects.util.ZohoHttpClient import ZohoHttpClient from projects.api.Api import Api from projects.parser.UsersParser import", "1.Get all the users in the given project. \"\"\" def", "project_id(long): Project id. Returns: list of instance: List of users", "Project id. Returns: list of instance: List of users object.", "def get_users(self, project_id): \"\"\"Get all the users in the given", "Args: project_id(long): Project id. Returns: list of instance: List of", "'/projects/' + str(project_id) + '/users/' response = zoho_http_client.get(url, self.details) return", "def __init__(self, authtoken, portal_id): \"\"\"Initialize Users api using user's authtoken", "to 1.Get all the users in the given project. \"\"\"", "Returns: list of instance: List of users object. \"\"\" url", "in the given project. \"\"\" def __init__(self, authtoken, portal_id): \"\"\"Initialize", "and portal id. Args: authtoken(str): User's authtoken. portal_id(str): User's portal", "used to 1.Get all the users in the given project.", "from projects.util.ZohoHttpClient import ZohoHttpClient from projects.api.Api import Api from projects.parser.UsersParser", "portal_id def get_users(self, project_id): \"\"\"Get all the users in the", "the given project. Args: project_id(long): Project id. Returns: list of", "authtoken. portal_id(str): User's portal id. \"\"\" self.details = { 'authtoken':", "portal id. Args: authtoken(str): User's authtoken. portal_id(str): User's portal id.", "import ZohoHttpClient from projects.api.Api import Api from projects.parser.UsersParser import UsersParser", "User's authtoken. portal_id(str): User's portal id. \"\"\" self.details = {", "id. \"\"\" self.details = { 'authtoken': authtoken } self.portal_id =", "users object. 
\"\"\" url = base_url + 'portal/' + str(self.portal_id)", "\"\"\" self.details = { 'authtoken': authtoken } self.portal_id = portal_id", "import UsersParser base_url = Api().base_url zoho_http_client = ZohoHttpClient() parser =", "users in the given project. \"\"\" def __init__(self, authtoken, portal_id):", "\"\"\"Get all the users in the given project. Args: project_id(long):", "'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/users/' response", "= Api().base_url zoho_http_client = ZohoHttpClient() parser = UsersParser() class UsersApi:", "object. \"\"\" url = base_url + 'portal/' + str(self.portal_id) +", "zoho_http_client = ZohoHttpClient() parser = UsersParser() class UsersApi: \"\"\"Users Api", "List of users object. \"\"\" url = base_url + 'portal/'", "all the users in the given project. \"\"\" def __init__(self,", "of instance: List of users object. \"\"\" url = base_url", "str(self.portal_id) + '/projects/' + str(project_id) + '/users/' response = zoho_http_client.get(url,", "project. \"\"\" def __init__(self, authtoken, portal_id): \"\"\"Initialize Users api using", "UsersParser() class UsersApi: \"\"\"Users Api class is used to 1.Get", "\"\"\" def __init__(self, authtoken, portal_id): \"\"\"Initialize Users api using user's", "+ '/projects/' + str(project_id) + '/users/' response = zoho_http_client.get(url, self.details)", "Users api using user's authtoken and portal id. Args: authtoken(str):", "= { 'authtoken': authtoken } self.portal_id = portal_id def get_users(self,", "projects.parser.UsersParser import UsersParser base_url = Api().base_url zoho_http_client = ZohoHttpClient() parser", "base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) +", "ZohoHttpClient from projects.api.Api import Api from projects.parser.UsersParser import UsersParser base_url", "= ZohoHttpClient() parser = UsersParser() class UsersApi: \"\"\"Users Api class", "the users in the given project. Args: project_id(long): Project id.", "id. 
Args: authtoken(str): User's authtoken. portal_id(str): User's portal id. \"\"\"", "authtoken(str): User's authtoken. portal_id(str): User's portal id. \"\"\" self.details =", "the users in the given project. \"\"\" def __init__(self, authtoken,", "Api from projects.parser.UsersParser import UsersParser base_url = Api().base_url zoho_http_client =", "Api().base_url zoho_http_client = ZohoHttpClient() parser = UsersParser() class UsersApi: \"\"\"Users", "\"\"\" url = base_url + 'portal/' + str(self.portal_id) + '/projects/'", "using user's authtoken and portal id. Args: authtoken(str): User's authtoken.", "the given project. \"\"\" def __init__(self, authtoken, portal_id): \"\"\"Initialize Users", "self.details = { 'authtoken': authtoken } self.portal_id = portal_id def", "get_users(self, project_id): \"\"\"Get all the users in the given project.", "project. Args: project_id(long): Project id. Returns: list of instance: List", "class UsersApi: \"\"\"Users Api class is used to 1.Get all", "in the given project. Args: project_id(long): Project id. Returns: list", "= portal_id def get_users(self, project_id): \"\"\"Get all the users in", "__init__(self, authtoken, portal_id): \"\"\"Initialize Users api using user's authtoken and", "User's portal id. \"\"\" self.details = { 'authtoken': authtoken }", "id. Returns: list of instance: List of users object. \"\"\"", "+ str(project_id) + '/users/' response = zoho_http_client.get(url, self.details) return parser.get_users(response)", "base_url = Api().base_url zoho_http_client = ZohoHttpClient() parser = UsersParser() class", "given project. 
\"\"\" def __init__(self, authtoken, portal_id): \"\"\"Initialize Users api", "authtoken, portal_id): \"\"\"Initialize Users api using user's authtoken and portal", "UsersParser base_url = Api().base_url zoho_http_client = ZohoHttpClient() parser = UsersParser()", "is used to 1.Get all the users in the given", "from projects.parser.UsersParser import UsersParser base_url = Api().base_url zoho_http_client = ZohoHttpClient()", "= base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id)", "\"\"\"Initialize Users api using user's authtoken and portal id. Args:", "#$Id$ from projects.util.ZohoHttpClient import ZohoHttpClient from projects.api.Api import Api from", "<reponame>chamathshashika/projects-python-wrappers #$Id$ from projects.util.ZohoHttpClient import ZohoHttpClient from projects.api.Api import Api", "Api class is used to 1.Get all the users in", "ZohoHttpClient() parser = UsersParser() class UsersApi: \"\"\"Users Api class is", "portal_id): \"\"\"Initialize Users api using user's authtoken and portal id.", "user's authtoken and portal id. Args: authtoken(str): User's authtoken. portal_id(str):", "from projects.api.Api import Api from projects.parser.UsersParser import UsersParser base_url =", "portal_id(str): User's portal id. \"\"\" self.details = { 'authtoken': authtoken", "'authtoken': authtoken } self.portal_id = portal_id def get_users(self, project_id): \"\"\"Get", "{ 'authtoken': authtoken } self.portal_id = portal_id def get_users(self, project_id):", "+ 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/users/'", "authtoken and portal id. Args: authtoken(str): User's authtoken. portal_id(str): User's", "projects.api.Api import Api from projects.parser.UsersParser import UsersParser base_url = Api().base_url", "api using user's authtoken and portal id. Args: authtoken(str): User's", "url = base_url + 'portal/' + str(self.portal_id) + '/projects/' +", "project_id): \"\"\"Get all the users in the given project. 
Args:", "class is used to 1.Get all the users in the", "\"\"\"Users Api class is used to 1.Get all the users", "UsersApi: \"\"\"Users Api class is used to 1.Get all the", "instance: List of users object. \"\"\" url = base_url +", "list of instance: List of users object. \"\"\" url =", "users in the given project. Args: project_id(long): Project id. Returns:", "= UsersParser() class UsersApi: \"\"\"Users Api class is used to", "Args: authtoken(str): User's authtoken. portal_id(str): User's portal id. \"\"\" self.details", "self.portal_id = portal_id def get_users(self, project_id): \"\"\"Get all the users", "+ str(self.portal_id) + '/projects/' + str(project_id) + '/users/' response =", "given project. Args: project_id(long): Project id. Returns: list of instance:", "of users object. \"\"\" url = base_url + 'portal/' +", "import Api from projects.parser.UsersParser import UsersParser base_url = Api().base_url zoho_http_client", "parser = UsersParser() class UsersApi: \"\"\"Users Api class is used", "authtoken } self.portal_id = portal_id def get_users(self, project_id): \"\"\"Get all", "} self.portal_id = portal_id def get_users(self, project_id): \"\"\"Get all the", "all the users in the given project. Args: project_id(long): Project", "portal id. \"\"\" self.details = { 'authtoken': authtoken } self.portal_id" ]
[ "at_goal() and not rospy.is_shutdown(): if start_disabled: [pub.publish(Empty()) for pub in", "goal position untuck_goal = map(diff_check, angles, self._joint_moves['untuck'][limb]) tuck_goal = map(diff_check,", "robot, to avoid arm jerking from \"force-field\". head = baxter_interface.Head()", "all(self._arm_state['tuck'][limb] == 'tuck' for limb in self._limbs): rospy.loginfo(\"Tucking: Arms already", "disabled[limb]: self._disable_pub[limb].publish(Empty()) if limb in tuck: self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb]))) self._check_arm_state()", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "# Check if shoulder is flipped over peak self._arm_state['flipped'][limb] =", "= baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub = rospy.Publisher('robot/set_super_enable', Bool, queue_size=10) def _update_collision(self, data,", "need to be disabled to get the arm around the", "Check if in a goal position untuck_goal = map(diff_check, angles,", "Arms already in 'Tucked' position.\") self._done = True return else:", "to tuck/untuck Baxter's arms to/from the shipping pose \"\"\" import", "# # 1. Redistributions of source code must retain the", "above copyright notice, # this list of conditions and the", "in the # documentation and/or other materials provided with the", "{ 'tuck': { 'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01,", "0.0, 0.01, 0.0], 'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01,", "= 'untuck' disabled[limb] = False self._move_to(actions, disabled) # Disable collision", "self._tuck_rate.sleep() def _move_to(self, tuck, disabled): if any(disabled.values()): [pub.publish(Empty()) for pub", "not self._rs.state().enabled: self._enable_pub.publish(True) head.set_pan(0.0, 0.5, timeout=0) self._tuck_rate.sleep() if start_disabled: while", "# # Redistribution and use in source and binary forms,", "node... 
\") rospy.init_node(\"rsdk_tuck_arms\") rospy.loginfo(\"%sucking arms\" % (\"T\" if tuck else", "= deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck', 'right': 'untuck'} self._move_to(actions, suppress)", "IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE", "rospy.logwarn('Aborting: Shutting down safely...') if any(self._arm_state['collide'].values()): while self._rs.state().enabled != False:", "#!/usr/bin/env python # Copyright (c) 2013-2015, Rethink Robotics # All", "notice, # this list of conditions and the following disclaimer.", "self._rs = baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub = rospy.Publisher('robot/set_super_enable', Bool, queue_size=10) def _update_collision(self,", "for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def _move_to(self, tuck, disabled):", "return def supervised_tuck(self): # Update our starting state to check", "import ( CollisionAvoidanceState, ) from baxter_interface import CHECK_VERSION class Tuck(object):", "goal in tuck.viewitems()) and not rospy.is_shutdown()): if self._rs.state().enabled == False:", "in binary form must reproduce the above copyright # notice,", "without specific prior written permission. 
# # THIS SOFTWARE IS", "import CHECK_VERSION class Tuck(object): def __init__(self, tuck_cmd): self._done = False", "not self._rs.state().enabled at_goal = lambda: (abs(head.pan()) <= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo(\"Moving head", "# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF", "'left') self._collide_rsub = rospy.Subscriber( 'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'right') self._disable_pub =", "= map(diff_check, angles, self._joint_moves['untuck'][limb]) tuck_goal = map(diff_check, angles[0:2], self._joint_moves['tuck'][limb][0:2]) if", "# before enabling robot, to avoid arm jerking from \"force-field\".", "for goals and behind collision field. If s1 joint is", "rospy.Subscriber( 'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'left') self._collide_rsub = rospy.Subscriber( 'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState,", "= [self._arms[limb].joint_angle(joint) for joint in self._arms[limb].joint_names()] # Check if in", "from # this software without specific prior written permission. 
#", "Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown) tucker.supervised_tuck() rospy.loginfo(\"Finished tuck\") if __name__ == \"__main__\": main()", "for limb in self._limbs: if not self._arm_state['flipped'][limb]: actions[limb] = 'untuck'", "any_flipped else \"off\") # Move to neutral pose before tucking", "limb in self._limbs: angles = [self._arms[limb].joint_angle(joint) for joint in self._arms[limb].joint_names()]", "THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR", "list of conditions and the following disclaimer in the #", "NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF", "from baxter_core_msgs.msg import ( CollisionAvoidanceState, ) from baxter_interface import CHECK_VERSION", "to neutral pose before tucking arms to avoid damage self._check_arm_state()", "else \"Unt\",)) tucker = Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown) tucker.supervised_tuck() rospy.loginfo(\"Finished tuck\") if", "in self._limbs: if not self._arm_state['flipped'][limb]: actions[limb] = 'untuck' disabled[limb] =", "rospy.is_shutdown(): [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def _move_to(self,", "== 'tuck' for limb in self._limbs): rospy.loginfo(\"Tucking: Arms already in", "endorse or promote products derived from # this software without", "'none' # Check if shoulder is flipped over peak self._arm_state['flipped'][limb]", "start_disabled: while self._rs.state().enabled == True and not rospy.is_shutdown(): [pub.publish(Empty()) for", "Shutting down safely...') if any(self._arm_state['collide'].values()): while self._rs.state().enabled != False: [pub.publish(Empty())", "and untuck arms if any(self._arm_state['flipped'].values()): rospy.loginfo(\"Untucking: One or more arms", "'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50] } }", "force-field. 
\"\"\" diff_check = lambda a, b: abs(a - b)", "source and binary forms, with or without # modification, are", "abs(a - b) <= self._tuck_threshold for limb in self._limbs: angles", "self._arm_state['flipped'][limb] = ( self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle) def _prepare_to_tuck(self):", "INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER", "-3.0, 2.55, -0.0, 0.01, 0.0] }, 'untuck': { 'left': [-0.08,", "is over the peak, collision will need to be disabled", "2.55, -0.0, 0.01, 0.0] }, 'untuck': { 'left': [-0.08, -1.0,", "} } self._collide_lsub = rospy.Subscriber( 'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'left') self._collide_rsub", "Empty, queue_size=10) } self._rs = baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub = rospy.Publisher('robot/set_super_enable', Bool,", "map(diff_check, angles, self._joint_moves['untuck'][limb]) tuck_goal = map(diff_check, angles[0:2], self._joint_moves['tuck'][limb][0:2]) if all(untuck_goal):", "args.tuck rospy.loginfo(\"Initializing node... \") rospy.init_node(\"rsdk_tuck_arms\") rospy.loginfo(\"%sucking arms\" % (\"T\" if", "% (\"T\" if tuck else \"Unt\",)) tucker = Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown)", "and untucking.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck',", "self._update_collision, 'left') self._collide_rsub = rospy.Subscriber( 'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'right') self._disable_pub", "CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "# documentation and/or other materials provided with the distribution. 
#", "in self._limbs): rospy.loginfo(\"Tucking: Arms already in 'Tucked' position.\") self._done =", "# Disable collision and Tuck Arms rospy.loginfo(\"Tucking: Tucking with collision", "damage self._check_arm_state() actions = dict() disabled = {'left': True, 'right':", "Tucked.\") any_flipped = not all(self._arm_state['flipped'].values()) if any_flipped: rospy.loginfo( \"Moving to", "arms\") args = parser.parse_args(rospy.myargv()[1:]) tuck = args.tuck rospy.loginfo(\"Initializing node... \")", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE", "= False self._move_to(actions, disabled) # Disable collision and Tuck Arms", "CHECK_VERSION class Tuck(object): def __init__(self, tuck_cmd): self._done = False self._limbs", "self._enable_pub.publish(False) self._tuck_rate.sleep() def main(): parser = argparse.ArgumentParser() tuck_group = parser.add_mutually_exclusive_group(required=True)", "= rospy.Subscriber( 'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'right') self._disable_pub = { 'left':", "= 'tuck' else: self._arm_state['tuck'][limb] = 'none' # Check if shoulder", "'tuck': {'left': 'none', 'right': 'none'}, 'collide': {'left': False, 'right': False},", "AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR IMPLIED", "= { 'left': baxter_interface.Limb('left'), 'right': baxter_interface.Limb('right'), } self._tuck = tuck_cmd", "self._joint_moves = { 'tuck': { 'left': [-1.0, -2.07, 3.0, 2.55,", "self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle) def _prepare_to_tuck(self): # If arms", "args = parser.parse_args(rospy.myargv()[1:]) tuck = args.tuck rospy.loginfo(\"Initializing node... \") rospy.init_node(\"rsdk_tuck_arms\")", "disclaimer. # 2. 
Redistributions in binary form must reproduce the", "Untuck Arms else: # If arms are tucked disable collision", "'left': rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance', Empty, queue_size=10), 'right': rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance', Empty, queue_size=10)", "3.0, 2.55, 0.0, 0.01, 0.0], 'right': [1.0, -2.07, -3.0, 2.55,", "safely...') if any(self._arm_state['collide'].values()): while self._rs.state().enabled != False: [pub.publish(Empty()) for pub", "and binary forms, with or without # modification, are permitted", "names of its # contributors may be used to endorse", "lambda: (abs(head.pan()) <= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo(\"Moving head to neutral position\") while", "get the arm around the head-arm collision force-field. \"\"\" diff_check", "self._rs.state().enabled at_goal = lambda: (abs(head.pan()) <= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo(\"Moving head to", "while not at_goal() and not rospy.is_shutdown(): if start_disabled: [pub.publish(Empty()) for", "Update our starting state to check if arms are tucked", "field. 
If s1 joint is over the peak, collision will", "form must reproduce the above copyright # notice, this list", "0 self._check_arm_state() def _check_arm_state(self): \"\"\" Check for goals and behind", "self._arm_state = { 'tuck': {'left': 'none', 'right': 'none'}, 'collide': {'left':", "self._tuck_rate.sleep() if start_disabled: while self._rs.state().enabled == True and not rospy.is_shutdown():", "before tucking arms to avoid damage self._check_arm_state() actions = dict()", "disabled[limb] = False self._move_to(actions, disabled) # Disable collision and Tuck", "= rospy.Subscriber( 'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'left') self._collide_rsub = rospy.Subscriber( 'robot/limb/right/collision_avoidance_state',", "of source code must retain the above copyright notice, #", "self._arms[limb].joint_names()] # Check if in a goal position untuck_goal =", "baxter_interface.Head() start_disabled = not self._rs.state().enabled at_goal = lambda: (abs(head.pan()) <=", "the peak, collision will need to be disabled to get", "joint in self._arms[limb].joint_names()] # Check if in a goal position", "not at_goal() and not rospy.is_shutdown(): if start_disabled: [pub.publish(Empty()) for pub", "for limb in self._limbs: if disabled[limb]: self._disable_pub[limb].publish(Empty()) if limb in", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE", "this software without specific prior written permission. 
# # THIS", "= rospy.Rate(20.0) # Hz self._tuck_threshold = 0.2 # radians self._peak_angle", "True and not rospy.is_shutdown(): [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False)", "any_flipped = not all(self._arm_state['flipped'].values()) if any_flipped: rospy.loginfo( \"Moving to neutral", "Disabling Collision Avoidance and untucking.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions", "rospy.loginfo(\"Untucking: Arms already Untucked;\" \" Moving to neutral position.\") self._check_arm_state()", "written permission. # # THIS SOFTWARE IS PROVIDED BY THE", "# this list of conditions and the following disclaimer. #", "{'left': True, 'right': True} for limb in self._limbs: if not", "[pub.publish(Empty()) for pub in self._disable_pub.values()] while (any(self._arm_state['tuck'][limb] != goal for", "position untuck_goal = map(diff_check, angles, self._joint_moves['untuck'][limb]) tuck_goal = map(diff_check, angles[0:2],", "(\"T\" if tuck else \"Unt\",)) tucker = Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown) tucker.supervised_tuck()", "Empty, queue_size=10), 'right': rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance', Empty, queue_size=10) } self._rs =", "and Tuck Arms rospy.loginfo(\"Tucking: Tucking with collision avoidance off.\") actions", "already Untucked;\" \" Moving to neutral position.\") self._check_arm_state() suppress =", "disabled = {'left': True, 'right': True} for limb in self._limbs:", "self._tuck_rate = rospy.Rate(20.0) # Hz self._tuck_threshold = 0.2 # radians", "GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR", "WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF", "be disabled to get the arm around the head-arm collision", "self._tuck_threshold for limb in self._limbs: angles = [self._arms[limb].joint_angle(joint) for joint", "Tuck Arms rospy.loginfo(\"Tucking: Tucking with collision avoidance off.\") actions =", "reproduce the above copyright # notice, this 
list of conditions", "head.set_pan(0.0, 0.5, timeout=0) self._tuck_rate.sleep() if start_disabled: while self._rs.state().enabled == True", "software without specific prior written permission. # # THIS SOFTWARE", "== True and not rospy.is_shutdown(): [pub.publish(Empty()) for pub in self._disable_pub.values()]", "rospy.loginfo(\"Tucking: Tucking with collision avoidance off.\") actions = {'left': 'tuck',", "avoidance off.\") actions = {'left': 'tuck', 'right': 'tuck'} disabled =", "{ 'left': baxter_interface.Limb('left'), 'right': baxter_interface.Limb('right'), } self._tuck = tuck_cmd self._tuck_rate", "state, disable collision avoidance # before enabling robot, to avoid", "DAMAGE. \"\"\" Tool to tuck/untuck Baxter's arms to/from the shipping", "rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance', Empty, queue_size=10), 'right': rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance', Empty, queue_size=10) }", "any(self._arm_state['collide'].values()): while self._rs.state().enabled != False: [pub.publish(Empty()) for pub in self._disable_pub.values()]", "the shipping pose \"\"\" import argparse from copy import deepcopy", "BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,", "will need to be disabled to get the arm around", "rospy.loginfo( \"Moving to neutral start position with collision %s.\", \"on\"", "True} for limb in self._limbs: if not self._arm_state['flipped'][limb]: actions[limb] =", "limb in self._limbs): rospy.loginfo(\"Tucking: Arms already in 'Tucked' position.\") self._done", "'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0] }, 'untuck':", "3. Neither the name of the Rethink Robotics nor the", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "self._tuck_rate.sleep() if any(self._arm_state['collide'].values()): self._rs.disable() return def supervised_tuck(self): # Update our", "following conditions are met: # # 1. 
Redistributions of source", "\"AS IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "[-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50], 'right': [0.08, -1.0,", "to neutral position\") while not at_goal() and not rospy.is_shutdown(): if", "while self._rs.state().enabled == True and not rospy.is_shutdown(): [pub.publish(Empty()) for pub", "joint is over the peak, collision will need to be", "BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR", "arms are in \"tucked\" state, disable collision avoidance # before", "collision avoidance # before enabling robot, to avoid arm jerking", "starting state to check if arms are tucked self._prepare_to_tuck() self._check_arm_state()", "any_flipped: rospy.loginfo( \"Moving to neutral start position with collision %s.\",", "\"\"\" diff_check = lambda a, b: abs(a - b) <=", "TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "<= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo(\"Moving head to neutral position\") while not at_goal()", "if self._rs.state().enabled == False: self._enable_pub.publish(True) for limb in self._limbs: if", "CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES,", "rospy.loginfo(\"Untucking: One or more arms Tucked;\" \" Disabling Collision Avoidance", "if not self._rs.state().enabled: self._enable_pub.publish(True) head.set_pan(0.0, 0.5, timeout=0) self._tuck_rate.sleep() if start_disabled:", "to neutral start position with collision %s.\", \"on\" if any_flipped", "= Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown) tucker.supervised_tuck() rospy.loginfo(\"Finished tuck\") if __name__ == \"__main__\":", "'flipped': {'left': False, 'right': False} } self._joint_moves = { 'tuck':", "IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
\"\"\"", "not rospy.is_shutdown(): [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def", "= { 'left': rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance', Empty, queue_size=10), 'right': rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance',", "in tuck: self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb]))) self._check_arm_state() self._tuck_rate.sleep() if any(self._arm_state['collide'].values()): self._rs.disable()", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL", "self._peak_angle) def _prepare_to_tuck(self): # If arms are in \"tucked\" state,", "== True: # If arms are already tucked, report this", "for limb in self._limbs): rospy.loginfo(\"Tucking: Arms already in 'Tucked' position.\")", "# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "collision and untuck arms if any(self._arm_state['flipped'].values()): rospy.loginfo(\"Untucking: One or more", "0.0] }, 'untuck': { 'left': [-0.08, -1.0, -1.19, 1.94, 0.67,", "'untuck', 'right': 'untuck'} self._move_to(actions, suppress) self._done = True return #", "CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #", "# this software without specific prior written permission. 
# #", "'tuck' for limb in self._limbs): rospy.loginfo(\"Tucking: Arms already in 'Tucked'", "self._enable_pub.publish(False) self._tuck_rate.sleep() def _move_to(self, tuck, disabled): if any(disabled.values()): [pub.publish(Empty()) for", "tuck else \"Unt\",)) tucker = Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown) tucker.supervised_tuck() rospy.loginfo(\"Finished tuck\")", "[pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def main(): parser", "with collision %s.\", \"on\" if any_flipped else \"off\") # Move", "AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT,", "Bool, ) import baxter_interface from baxter_core_msgs.msg import ( CollisionAvoidanceState, )", "suppress) self._done = True return def clean_shutdown(self): \"\"\"Handles ROS shutdown", "True, 'right': True} self._move_to(actions, disabled) self._done = True return #", "'right': False} } self._joint_moves = { 'tuck': { 'left': [-1.0,", "move to neutral location else: rospy.loginfo(\"Untucking: Arms already Untucked;\" \"", "head = baxter_interface.Head() start_disabled = not self._rs.state().enabled at_goal = lambda:", "actions = {'left': 'tuck', 'right': 'tuck'} disabled = {'left': True,", "\"\"\"Handles ROS shutdown (Ctrl-C) safely.\"\"\" if not self._done: rospy.logwarn('Aborting: Shutting", "Redistributions of source code must retain the above copyright notice,", "already in 'Tucked' position.\") self._done = True return else: rospy.loginfo(\"Tucking:", "= {'left': 'untuck', 'right': 'untuck'} self._move_to(actions, suppress) self._done = True", "of conditions and the following disclaimer. # 2. 
Redistributions in", "ROS shutdown (Ctrl-C) safely.\"\"\" if not self._done: rospy.logwarn('Aborting: Shutting down", "neutral start position with collision %s.\", \"on\" if any_flipped else", "'right') self._disable_pub = { 'left': rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance', Empty, queue_size=10), 'right':", "if start_disabled: while self._rs.state().enabled == True and not rospy.is_shutdown(): [pub.publish(Empty())", "if any(disabled.values()): [pub.publish(Empty()) for pub in self._disable_pub.values()] while (any(self._arm_state['tuck'][limb] !=", "True return else: rospy.loginfo(\"Tucking: One or more arms not Tucked.\")", "[self._arms[limb].joint_angle(joint) for joint in self._arms[limb].joint_names()] # Check if in a", "help=\"tuck arms\") tuck_group.add_argument(\"-u\", \"--untuck\", dest=\"untuck\", action='store_true', default=False, help=\"untuck arms\") args", "neutral position.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck',", "= 'none' # Check if shoulder is flipped over peak", "behind collision field. 
If s1 joint is over the peak,", "'collide': {'left': False, 'right': False}, 'flipped': {'left': False, 'right': False}", "over peak self._arm_state['flipped'][limb] = ( self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle)", "Arms already Untucked;\" \" Moving to neutral position.\") self._check_arm_state() suppress", "SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #", "arms already untucked, move to neutral location else: rospy.loginfo(\"Untucking: Arms", "for pub in self._disable_pub.values()] if not self._rs.state().enabled: self._enable_pub.publish(True) head.set_pan(0.0, 0.5,", "check if arms are tucked self._prepare_to_tuck() self._check_arm_state() # Tuck Arms", "or without # modification, are permitted provided that the following", "in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def _move_to(self, tuck, disabled): if any(disabled.values()):", "HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN", "Redistribution and use in source and binary forms, with or", "source code must retain the above copyright notice, # this", "the following disclaimer in the # documentation and/or other materials", "# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "default=False, help=\"tuck arms\") tuck_group.add_argument(\"-u\", \"--untuck\", dest=\"untuck\", action='store_true', default=False, help=\"untuck arms\")", "Rethink Robotics nor the names of its # contributors may", "parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument(\"-t\",\"--tuck\", dest=\"tuck\", action='store_true', default=False, help=\"tuck arms\") tuck_group.add_argument(\"-u\", \"--untuck\", dest=\"untuck\",", "'right': True} for limb in self._limbs: if not self._arm_state['flipped'][limb]: actions[limb]", "all(self._arm_state['flipped'].values()) if any_flipped: rospy.loginfo( \"Moving to neutral start position with", "start_disabled: [pub.publish(Empty()) for pub in self._disable_pub.values()] if not 
self._rs.state().enabled: self._enable_pub.publish(True)", "'right': 'untuck'} self._move_to(actions, suppress) self._done = True return def clean_shutdown(self):", "tucked, report this to user and exit. if all(self._arm_state['tuck'][limb] ==", "pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def _move_to(self, tuck, disabled): if", "Hz self._tuck_threshold = 0.2 # radians self._peak_angle = -1.6 #", "True, 'right': True} for limb in self._limbs: if not self._arm_state['flipped'][limb]:", "python # Copyright (c) 2013-2015, Rethink Robotics # All rights", "the Rethink Robotics nor the names of its # contributors", "_check_arm_state(self): \"\"\" Check for goals and behind collision field. If", "One or more arms Tucked;\" \" Disabling Collision Avoidance and", "USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED", "rights reserved. # # Redistribution and use in source and", "tuck = args.tuck rospy.loginfo(\"Initializing node... \") rospy.init_node(\"rsdk_tuck_arms\") rospy.loginfo(\"%sucking arms\" %", "\"Moving to neutral start position with collision %s.\", \"on\" if", "EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE.", "binary form must reproduce the above copyright # notice, this", "neutral location else: rospy.loginfo(\"Untucking: Arms already Untucked;\" \" Moving to", "'untuck' elif all(tuck_goal): self._arm_state['tuck'][limb] = 'tuck' else: self._arm_state['tuck'][limb] = 'none'", "# Check if in a goal position untuck_goal = map(diff_check,", "\") rospy.init_node(\"rsdk_tuck_arms\") rospy.loginfo(\"%sucking arms\" % (\"T\" if tuck else \"Unt\",))", "radians self._peak_angle = -1.6 # radians self._arm_state = { 'tuck':", "= len(data.collision_object) > 0 self._check_arm_state() def _check_arm_state(self): \"\"\" Check for", "position with collision %s.\", \"on\" if any_flipped else \"off\") #", "IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR", "else: # If arms are tucked disable 
collision and untuck", "self._arm_state['collide'][limb] = len(data.collision_object) > 0 self._check_arm_state() def _check_arm_state(self): \"\"\" Check", "nor the names of its # contributors may be used", "to be disabled to get the arm around the head-arm", "if shoulder is flipped over peak self._arm_state['flipped'][limb] = ( self._arms[limb].joint_angle(limb", "{ 'left': rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance', Empty, queue_size=10), 'right': rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance', Empty,", "map(diff_check, angles[0:2], self._joint_moves['tuck'][limb][0:2]) if all(untuck_goal): self._arm_state['tuck'][limb] = 'untuck' elif all(tuck_goal):", "OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE", "import deepcopy import rospy from std_msgs.msg import ( Empty, Bool,", "user and exit. if all(self._arm_state['tuck'][limb] == 'tuck' for limb in", "else: rospy.loginfo(\"Tucking: One or more arms not Tucked.\") any_flipped =", "arms are tucked self._prepare_to_tuck() self._check_arm_state() # Tuck Arms if self._tuck", "else \"off\") # Move to neutral pose before tucking arms", "# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER", "self._enable_pub.publish(True) head.set_pan(0.0, 0.5, timeout=0) self._tuck_rate.sleep() if start_disabled: while self._rs.state().enabled ==", "angles = [self._arms[limb].joint_angle(joint) for joint in self._arms[limb].joint_names()] # Check if", "<= self._peak_angle) def _prepare_to_tuck(self): # If arms are in \"tucked\"", "Tool to tuck/untuck Baxter's arms to/from the shipping pose \"\"\"", "self._check_arm_state() self._tuck_rate.sleep() if any(self._arm_state['collide'].values()): self._rs.disable() return def supervised_tuck(self): # Update", "rospy.loginfo(\"Tucking: One or more arms not Tucked.\") any_flipped = not", "disclaimer in the # documentation and/or other materials provided with", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" #", "elif all(tuck_goal): self._arm_state['tuck'][limb] = 'tuck' else: self._arm_state['tuck'][limb] = 'none' #", "radians self._arm_state = { 'tuck': {'left': 'none', 'right': 'none'}, 'collide':", "untucking.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck', 'right':", "main(): parser = argparse.ArgumentParser() tuck_group = parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument(\"-t\",\"--tuck\", dest=\"tuck\", action='store_true',", "INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", "CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #", "arms not Tucked.\") any_flipped = not all(self._arm_state['flipped'].values()) if any_flipped: rospy.loginfo(", "tuck: self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb]))) self._check_arm_state() self._tuck_rate.sleep() if any(self._arm_state['collide'].values()): self._rs.disable() return", "OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS", "arms are already tucked, report this to user and exit.", "above copyright # notice, this list 
of conditions and the", "def _prepare_to_tuck(self): # If arms are in \"tucked\" state, disable", "PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY", "TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY", "= tuck_cmd self._tuck_rate = rospy.Rate(20.0) # Hz self._tuck_threshold = 0.2", "0.2 # radians self._peak_angle = -1.6 # radians self._arm_state =", "rospy from std_msgs.msg import ( Empty, Bool, ) import baxter_interface", "self._limbs): rospy.loginfo(\"Tucking: Arms already in 'Tucked' position.\") self._done = True", "THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE", "self._collide_lsub = rospy.Subscriber( 'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'left') self._collide_rsub = rospy.Subscriber(", "neutral position\") while not at_goal() and not rospy.is_shutdown(): if start_disabled:", "already tucked, report this to user and exit. if all(self._arm_state['tuck'][limb]", "self._arm_state['flipped'][limb]: actions[limb] = 'untuck' disabled[limb] = False self._move_to(actions, disabled) #", "+ '_s1') <= self._peak_angle) def _prepare_to_tuck(self): # If arms are", "{ 'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50], 'right':", "in source and binary forms, with or without # modification,", "# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", "argparse.ArgumentParser() tuck_group = parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument(\"-t\",\"--tuck\", dest=\"tuck\", action='store_true', default=False, help=\"tuck arms\")", "A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL", "is flipped over peak self._arm_state['flipped'][limb] = ( self._arms[limb].joint_angle(limb + '_s1')", "suppress = deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck', 'right': 'untuck'} self._move_to(actions,", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "permitted provided that the following conditions are met: # #", "0.01, 0.0] }, 'untuck': { 'left': [-0.08, -1.0, -1.19, 1.94,", "off.\") actions = {'left': 'tuck', 'right': 'tuck'} disabled = {'left':", "'Tucked' position.\") self._done = True return else: rospy.loginfo(\"Tucking: One or", "and behind collision field. If s1 joint is over the", "the name of the Rethink Robotics nor the names of", "EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE", "must reproduce the above copyright # notice, this list of", "limb, goal in tuck.viewitems()) and not rospy.is_shutdown()): if self._rs.state().enabled ==", "all(untuck_goal): self._arm_state['tuck'][limb] = 'untuck' elif all(tuck_goal): self._arm_state['tuck'][limb] = 'tuck' else:", "deepcopy import rospy from std_msgs.msg import ( Empty, Bool, )", "If arms already untucked, move to neutral location else: rospy.loginfo(\"Untucking:", "products derived from # this software without specific prior written", "self._done = True return def clean_shutdown(self): \"\"\"Handles ROS shutdown (Ctrl-C)", "self._rs.state().enabled == False: self._enable_pub.publish(True) for limb in self._limbs: if disabled[limb]:", "disabled) self._done = True return # Untuck Arms else: #", "arms Tucked;\" \" Disabling Collision Avoidance and untucking.\") self._check_arm_state() suppress", "tuck_cmd self._tuck_rate = rospy.Rate(20.0) # Hz self._tuck_threshold = 0.2 #", "True: # If arms are already tucked, report this to", "self._collide_rsub = rospy.Subscriber( 'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'right') self._disable_pub = {", "any(self._arm_state['flipped'].values()): 
rospy.loginfo(\"Untucking: One or more arms Tucked;\" \" Disabling Collision", "this list of conditions and the following disclaimer. # 2.", "# Hz self._tuck_threshold = 0.2 # radians self._peak_angle = -1.6", "# 1. Redistributions of source code must retain the above", "= ('left', 'right') self._arms = { 'left': baxter_interface.Limb('left'), 'right': baxter_interface.Limb('right'),", "use in source and binary forms, with or without #", "if any(self._arm_state['collide'].values()): self._rs.disable() return def supervised_tuck(self): # Update our starting", "rospy.init_node(\"rsdk_tuck_arms\") rospy.loginfo(\"%sucking arms\" % (\"T\" if tuck else \"Unt\",)) tucker", "%s.\", \"on\" if any_flipped else \"off\") # Move to neutral", "deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck', 'right': 'untuck'} self._move_to(actions, suppress) self._done", "self._move_to(actions, disabled) self._done = True return # Untuck Arms else:", "USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #", "'untuck'} self._move_to(actions, suppress) self._done = True return def clean_shutdown(self): \"\"\"Handles", "# radians self._arm_state = { 'tuck': {'left': 'none', 'right': 'none'},", "'right': 'tuck'} disabled = {'left': True, 'right': True} self._move_to(actions, disabled)", "STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING", "'none'}, 'collide': {'left': False, 'right': False}, 'flipped': {'left': False, 'right':", "the # documentation and/or other materials provided with the distribution.", "if disabled[limb]: self._disable_pub[limb].publish(Empty()) if limb in tuck: self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb])))", "EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY", "else: self._arm_state['tuck'][limb] = 'none' # Check if shoulder is flipped", "'robot/limb/right/suppress_collision_avoidance', Empty, 
queue_size=10) } self._rs = baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub = rospy.Publisher('robot/set_super_enable',", "If arms are in \"tucked\" state, disable collision avoidance #", "False, 'right': False} } self._joint_moves = { 'tuck': { 'left':", "not rospy.is_shutdown(): if start_disabled: [pub.publish(Empty()) for pub in self._disable_pub.values()] if", "distribution. # 3. Neither the name of the Rethink Robotics", "s1 joint is over the peak, collision will need to", "collision field. If s1 joint is over the peak, collision", "code must retain the above copyright notice, # this list", "from \"force-field\". head = baxter_interface.Head() start_disabled = not self._rs.state().enabled at_goal", "# POSSIBILITY OF SUCH DAMAGE. \"\"\" Tool to tuck/untuck Baxter's", "If arms are tucked disable collision and untuck arms if", "# contributors may be used to endorse or promote products", "THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF", "0.01, 0.0], 'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0]", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES", "(any(self._arm_state['tuck'][limb] != goal for limb, goal in tuck.viewitems()) and not", "collision force-field. 
\"\"\" diff_check = lambda a, b: abs(a -", "'right': 'untuck'} self._move_to(actions, suppress) self._done = True return # If", "'right') self._arms = { 'left': baxter_interface.Limb('left'), 'right': baxter_interface.Limb('right'), } self._tuck", "'right': rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance', Empty, queue_size=10) } self._rs = baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub", "tuck/untuck Baxter's arms to/from the shipping pose \"\"\" import argparse", "std_msgs.msg import ( Empty, Bool, ) import baxter_interface from baxter_core_msgs.msg", "copy import deepcopy import rospy from std_msgs.msg import ( Empty,", "a, b: abs(a - b) <= self._tuck_threshold for limb in", "!= False: [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def", "self._enable_pub.publish(True) for limb in self._limbs: if disabled[limb]: self._disable_pub[limb].publish(Empty()) if limb", ") from baxter_interface import CHECK_VERSION class Tuck(object): def __init__(self, tuck_cmd):", "[1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0] }, 'untuck': {", "self._arm_state['tuck'][limb] = 'untuck' elif all(tuck_goal): self._arm_state['tuck'][limb] = 'tuck' else: self._arm_state['tuck'][limb]", "True return # If arms already untucked, move to neutral", "OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED", "at_goal = lambda: (abs(head.pan()) <= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo(\"Moving head to neutral", "OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "arms to/from the shipping pose \"\"\" import argparse from copy", "peak self._arm_state['flipped'][limb] = ( self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle) def", "to check if arms are tucked self._prepare_to_tuck() self._check_arm_state() # Tuck", "actions[limb] = 'untuck' disabled[limb] = False self._move_to(actions, disabled) # Disable", "for pub in self._disable_pub.values()] while 
(any(self._arm_state['tuck'][limb] != goal for limb,", "{'left': 'tuck', 'right': 'tuck'} disabled = {'left': True, 'right': True}", "if arms are tucked self._prepare_to_tuck() self._check_arm_state() # Tuck Arms if", "LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION)", "if not self._arm_state['flipped'][limb]: actions[limb] = 'untuck' disabled[limb] = False self._move_to(actions,", "= lambda a, b: abs(a - b) <= self._tuck_threshold for", "with or without # modification, are permitted provided that the", "timeout=0) self._tuck_rate.sleep() if start_disabled: while self._rs.state().enabled == True and not", "# Tuck Arms if self._tuck == True: # If arms", "# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR", "peak, collision will need to be disabled to get the", "2013-2015, Rethink Robotics # All rights reserved. # # Redistribution", "Avoidance and untucking.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions = {'left':", "pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def main(): parser = argparse.ArgumentParser()", "Neither the name of the Rethink Robotics nor the names", "are already tucked, report this to user and exit. 
if", "Untucked;\" \" Moving to neutral position.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped'])", "= False self._limbs = ('left', 'right') self._arms = { 'left':", "disabled to get the arm around the head-arm collision force-field.", "self._done = True return else: rospy.loginfo(\"Tucking: One or more arms", "action='store_true', default=False, help=\"untuck arms\") args = parser.parse_args(rospy.myargv()[1:]) tuck = args.tuck", "disabled): if any(disabled.values()): [pub.publish(Empty()) for pub in self._disable_pub.values()] while (any(self._arm_state['tuck'][limb]", "dict() disabled = {'left': True, 'right': True} for limb in", "over the peak, collision will need to be disabled to", "rospy.is_shutdown(): if start_disabled: [pub.publish(Empty()) for pub in self._disable_pub.values()] if not", "location else: rospy.loginfo(\"Untucking: Arms already Untucked;\" \" Moving to neutral", "False: self._enable_pub.publish(True) for limb in self._limbs: if disabled[limb]: self._disable_pub[limb].publish(Empty()) if", "-1.0, 1.19, 1.94, -0.67, 1.03, 0.50] } } self._collide_lsub =", "collision and Tuck Arms rospy.loginfo(\"Tucking: Tucking with collision avoidance off.\")", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES", "to neutral location else: rospy.loginfo(\"Untucking: Arms already Untucked;\" \" Moving", "NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES;", "class Tuck(object): def __init__(self, tuck_cmd): self._done = False self._limbs =", "Collision Avoidance and untucking.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions =", "POSSIBILITY OF SUCH DAMAGE. 
\"\"\" Tool to tuck/untuck Baxter's arms", "-0.50], 'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50] }", "# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR", "if limb in tuck: self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb]))) self._check_arm_state() self._tuck_rate.sleep() if", "to/from the shipping pose \"\"\" import argparse from copy import", "self._limbs: if disabled[limb]: self._disable_pub[limb].publish(Empty()) if limb in tuck: self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(),", "DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND", "prior written permission. # # THIS SOFTWARE IS PROVIDED BY", "limb): self._arm_state['collide'][limb] = len(data.collision_object) > 0 self._check_arm_state() def _check_arm_state(self): \"\"\"", "BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF", "tuck_group.add_argument(\"-t\",\"--tuck\", dest=\"tuck\", action='store_true', default=False, help=\"tuck arms\") tuck_group.add_argument(\"-u\", \"--untuck\", dest=\"untuck\", action='store_true',", "self._limbs: if not self._arm_state['flipped'][limb]: actions[limb] = 'untuck' disabled[limb] = False", "self._move_to(actions, disabled) # Disable collision and Tuck Arms rospy.loginfo(\"Tucking: Tucking", "rospy.loginfo(\"Tucking: Arms already in 'Tucked' position.\") self._done = True return", "'untuck' disabled[limb] = False self._move_to(actions, disabled) # Disable collision and", "\"off\") # Move to neutral pose before tucking arms to", "tucked self._prepare_to_tuck() self._check_arm_state() # Tuck Arms if self._tuck == True:", "tuck, disabled): if any(disabled.values()): [pub.publish(Empty()) for pub in self._disable_pub.values()] while", "documentation and/or other materials provided with the distribution. 
# 3.", "avoid damage self._check_arm_state() actions = dict() disabled = {'left': True,", "if tuck else \"Unt\",)) tucker = Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown) tucker.supervised_tuck() rospy.loginfo(\"Finished", "in a goal position untuck_goal = map(diff_check, angles, self._joint_moves['untuck'][limb]) tuck_goal", "= ( self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle) def _prepare_to_tuck(self): #", "HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR", "( self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle) def _prepare_to_tuck(self): # If", "= 0.2 # radians self._peak_angle = -1.6 # radians self._arm_state", "and use in source and binary forms, with or without", "untucked, move to neutral location else: rospy.loginfo(\"Untucking: Arms already Untucked;\"", "shutdown (Ctrl-C) safely.\"\"\" if not self._done: rospy.logwarn('Aborting: Shutting down safely...')", "<= self._tuck_threshold for limb in self._limbs: angles = [self._arms[limb].joint_angle(joint) for", "(c) 2013-2015, Rethink Robotics # All rights reserved. 
# #", "COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT,", "= True return # If arms already untucked, move to", "parser = argparse.ArgumentParser() tuck_group = parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument(\"-t\",\"--tuck\", dest=\"tuck\", action='store_true', default=False,", "pub in self._disable_pub.values()] if not self._rs.state().enabled: self._enable_pub.publish(True) head.set_pan(0.0, 0.5, timeout=0)", "'left': baxter_interface.Limb('left'), 'right': baxter_interface.Limb('right'), } self._tuck = tuck_cmd self._tuck_rate =", "untuck arms if any(self._arm_state['flipped'].values()): rospy.loginfo(\"Untucking: One or more arms Tucked;\"", "arms\" % (\"T\" if tuck else \"Unt\",)) tucker = Tuck(tuck)", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE", "OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE", "in \"tucked\" state, disable collision avoidance # before enabling robot,", "actions = dict() disabled = {'left': True, 'right': True} for", "-0.0, 0.01, 0.0] }, 'untuck': { 'left': [-0.08, -1.0, -1.19,", "baxter_interface from baxter_core_msgs.msg import ( CollisionAvoidanceState, ) from baxter_interface import", "Arms rospy.loginfo(\"Tucking: Tucking with collision avoidance off.\") actions = {'left':", "untuck_goal = map(diff_check, angles, self._joint_moves['untuck'][limb]) tuck_goal = map(diff_check, angles[0:2], self._joint_moves['tuck'][limb][0:2])", "b) <= self._tuck_threshold for limb in self._limbs: angles = [self._arms[limb].joint_angle(joint)", "Tuck Arms if self._tuck == True: # If arms are", "'untuck'} self._move_to(actions, suppress) self._done = True return # If arms", "any(disabled.values()): [pub.publish(Empty()) for pub in self._disable_pub.values()] while (any(self._arm_state['tuck'][limb] != goal", "= dict() disabled = {'left': True, 'right': True} for limb", "more arms not Tucked.\") any_flipped = not all(self._arm_state['flipped'].values()) if any_flipped:", "collision 
avoidance off.\") actions = {'left': 'tuck', 'right': 'tuck'} disabled", "\" Disabling Collision Avoidance and untucking.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped'])", "import rospy from std_msgs.msg import ( Empty, Bool, ) import", "self._tuck = tuck_cmd self._tuck_rate = rospy.Rate(20.0) # Hz self._tuck_threshold =", "if any_flipped else \"off\") # Move to neutral pose before", "disabled) # Disable collision and Tuck Arms rospy.loginfo(\"Tucking: Tucking with", "are met: # # 1. Redistributions of source code must", "OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF", "are in \"tucked\" state, disable collision avoidance # before enabling", "in self._limbs: angles = [self._arms[limb].joint_angle(joint) for joint in self._arms[limb].joint_names()] #", "(abs(head.pan()) <= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo(\"Moving head to neutral position\") while not", "'tuck', 'right': 'tuck'} disabled = {'left': True, 'right': True} self._move_to(actions,", "ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. \"\"\" Tool", "supervised_tuck(self): # Update our starting state to check if arms", "PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "'tuck': { 'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0],", "around the head-arm collision force-field. 
\"\"\" diff_check = lambda a,", "!= goal for limb, goal in tuck.viewitems()) and not rospy.is_shutdown()):", "limb in self._limbs: if not self._arm_state['flipped'][limb]: actions[limb] = 'untuck' disabled[limb]", "to neutral position.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions = {'left':", "[0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50] } } self._collide_lsub", "pose \"\"\" import argparse from copy import deepcopy import rospy", "True} self._move_to(actions, disabled) self._done = True return # Untuck Arms", "= {'left': True, 'right': True} self._move_to(actions, disabled) self._done = True", "in tuck.viewitems()) and not rospy.is_shutdown()): if self._rs.state().enabled == False: self._enable_pub.publish(True)", "= -1.6 # radians self._arm_state = { 'tuck': {'left': 'none',", "avoidance # before enabling robot, to avoid arm jerking from", "be used to endorse or promote products derived from #", "disabled = {'left': True, 'right': True} self._move_to(actions, disabled) self._done =", "forms, with or without # modification, are permitted provided that", "= argparse.ArgumentParser() tuck_group = parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument(\"-t\",\"--tuck\", dest=\"tuck\", action='store_true', default=False, help=\"tuck", "queue_size=10) } self._rs = baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub = rospy.Publisher('robot/set_super_enable', Bool, queue_size=10)", "binary forms, with or without # modification, are permitted provided", "\" Moving to neutral position.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions", "# If arms are already tucked, report this to user", "start_disabled = not self._rs.state().enabled at_goal = lambda: (abs(head.pan()) <= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE)", "\"on\" if any_flipped else \"off\") # Move to neutral pose", "copyright # notice, this list of conditions and the following", "def 
_update_collision(self, data, limb): self._arm_state['collide'][limb] = len(data.collision_object) > 0 self._check_arm_state()", "data, limb): self._arm_state['collide'][limb] = len(data.collision_object) > 0 self._check_arm_state() def _check_arm_state(self):", "tuck_group = parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument(\"-t\",\"--tuck\", dest=\"tuck\", action='store_true', default=False, help=\"tuck arms\") tuck_group.add_argument(\"-u\",", "self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck', 'right': 'untuck'}", "arms if any(self._arm_state['flipped'].values()): rospy.loginfo(\"Untucking: One or more arms Tucked;\" \"", "self._joint_moves[tuck[limb]][limb]))) self._check_arm_state() self._tuck_rate.sleep() if any(self._arm_state['collide'].values()): self._rs.disable() return def supervised_tuck(self): #", "= lambda: (abs(head.pan()) <= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo(\"Moving head to neutral position\")", "-2.07, 3.0, 2.55, 0.0, 0.01, 0.0], 'right': [1.0, -2.07, -3.0,", "provided that the following conditions are met: # # 1.", "self._peak_angle = -1.6 # radians self._arm_state = { 'tuck': {'left':", "2.55, 0.0, 0.01, 0.0], 'right': [1.0, -2.07, -3.0, 2.55, -0.0,", "specific prior written permission. 
# # THIS SOFTWARE IS PROVIDED", "contributors may be used to endorse or promote products derived", "= True return # Untuck Arms else: # If arms", "if not self._done: rospy.logwarn('Aborting: Shutting down safely...') if any(self._arm_state['collide'].values()): while", "self._enable_pub = rospy.Publisher('robot/set_super_enable', Bool, queue_size=10) def _update_collision(self, data, limb): self._arm_state['collide'][limb]", "a goal position untuck_goal = map(diff_check, angles, self._joint_moves['untuck'][limb]) tuck_goal =", "self._update_collision, 'right') self._disable_pub = { 'left': rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance', Empty, queue_size=10),", "baxter_interface.Limb('right'), } self._tuck = tuck_cmd self._tuck_rate = rospy.Rate(20.0) # Hz", "goal for limb, goal in tuck.viewitems()) and not rospy.is_shutdown()): if", "from std_msgs.msg import ( Empty, Bool, ) import baxter_interface from", "0.50] } } self._collide_lsub = rospy.Subscriber( 'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'left')", "= {'left': True, 'right': True} for limb in self._limbs: if", "to avoid damage self._check_arm_state() actions = dict() disabled = {'left':", "Tucked;\" \" Disabling Collision Avoidance and untucking.\") self._check_arm_state() suppress =", "baxter_core_msgs.msg import ( CollisionAvoidanceState, ) from baxter_interface import CHECK_VERSION class", "arms are tucked disable collision and untuck arms if any(self._arm_state['flipped'].values()):", "from copy import deepcopy import rospy from std_msgs.msg import (", "rospy.loginfo(\"Initializing node... \") rospy.init_node(\"rsdk_tuck_arms\") rospy.loginfo(\"%sucking arms\" % (\"T\" if tuck", "name of the Rethink Robotics nor the names of its", "__init__(self, tuck_cmd): self._done = False self._limbs = ('left', 'right') self._arms", "= args.tuck rospy.loginfo(\"Initializing node... 
\") rospy.init_node(\"rsdk_tuck_arms\") rospy.loginfo(\"%sucking arms\" % (\"T\"", "1.19, 1.94, -0.67, 1.03, 0.50] } } self._collide_lsub = rospy.Subscriber(", "already untucked, move to neutral location else: rospy.loginfo(\"Untucking: Arms already", "0.67, 1.03, -0.50], 'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03,", "self._tuck_rate.sleep() def main(): parser = argparse.ArgumentParser() tuck_group = parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument(\"-t\",\"--tuck\",", "rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance', Empty, queue_size=10) } self._rs = baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub =", "the above copyright # notice, this list of conditions and", "position.\") self._done = True return else: rospy.loginfo(\"Tucking: One or more", "if any_flipped: rospy.loginfo( \"Moving to neutral start position with collision", "# If arms already untucked, move to neutral location else:", "self._rs.state().enabled != False: [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep()", "are permitted provided that the following conditions are met: #", "the head-arm collision force-field. \"\"\" diff_check = lambda a, b:", "-1.19, 1.94, 0.67, 1.03, -0.50], 'right': [0.08, -1.0, 1.19, 1.94,", "# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)", "self._arm_state['tuck'][limb] = 'tuck' else: self._arm_state['tuck'][limb] = 'none' # Check if", "disable collision avoidance # before enabling robot, to avoid arm", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #", "met: # # 1. 
Redistributions of source code must retain", "ARISING IN ANY WAY OUT OF THE USE OF THIS", "baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo(\"Moving head to neutral position\") while not at_goal() and", "= True return def clean_shutdown(self): \"\"\"Handles ROS shutdown (Ctrl-C) safely.\"\"\"", "-0.67, 1.03, 0.50] } } self._collide_lsub = rospy.Subscriber( 'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState,", "# Update our starting state to check if arms are", "# All rights reserved. # # Redistribution and use in", "ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN", "1.03, 0.50] } } self._collide_lsub = rospy.Subscriber( 'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState, self._update_collision,", "tuck_cmd): self._done = False self._limbs = ('left', 'right') self._arms =", "LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS", "len(data.collision_object) > 0 self._check_arm_state() def _check_arm_state(self): \"\"\" Check for goals", "_move_to(self, tuck, disabled): if any(disabled.values()): [pub.publish(Empty()) for pub in self._disable_pub.values()]", "rospy.Subscriber( 'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'right') self._disable_pub = { 'left': rospy.Publisher(", "diff_check = lambda a, b: abs(a - b) <= self._tuck_threshold", "not Tucked.\") any_flipped = not all(self._arm_state['flipped'].values()) if any_flipped: rospy.loginfo( \"Moving", "in self._disable_pub.values()] while (any(self._arm_state['tuck'][limb] != goal for limb, goal in", "= 'untuck' elif all(tuck_goal): self._arm_state['tuck'][limb] = 'tuck' else: self._arm_state['tuck'][limb] =", "False self._move_to(actions, disabled) # Disable collision and Tuck Arms rospy.loginfo(\"Tucking:", "= rospy.Publisher('robot/set_super_enable', Bool, queue_size=10) def _update_collision(self, data, limb): self._arm_state['collide'][limb] =", "One or more arms not Tucked.\") any_flipped = not 
all(self._arm_state['flipped'].values())", "SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR", "if any(self._arm_state['collide'].values()): while self._rs.state().enabled != False: [pub.publish(Empty()) for pub in", "# Redistribution and use in source and binary forms, with", "self._tuck_threshold = 0.2 # radians self._peak_angle = -1.6 # radians", "\"--untuck\", dest=\"untuck\", action='store_true', default=False, help=\"untuck arms\") args = parser.parse_args(rospy.myargv()[1:]) tuck", "the above copyright notice, # this list of conditions and", "# Untuck Arms else: # If arms are tucked disable", "down safely...') if any(self._arm_state['collide'].values()): while self._rs.state().enabled != False: [pub.publish(Empty()) for", "state to check if arms are tucked self._prepare_to_tuck() self._check_arm_state() #", "conditions are met: # # 1. Redistributions of source code", "all(tuck_goal): self._arm_state['tuck'][limb] = 'tuck' else: self._arm_state['tuck'][limb] = 'none' # Check", "1.03, -0.50], 'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50]", "{ 'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0], 'right':", "not rospy.is_shutdown()): if self._rs.state().enabled == False: self._enable_pub.publish(True) for limb in", "True return # Untuck Arms else: # If arms are", "and not rospy.is_shutdown(): [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep()", "0.5, timeout=0) self._tuck_rate.sleep() if start_disabled: while self._rs.state().enabled == True and", "rospy.loginfo(\"%sucking arms\" % (\"T\" if tuck else \"Unt\",)) tucker =", "LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN", "if start_disabled: [pub.publish(Empty()) for pub in self._disable_pub.values()] if not self._rs.state().enabled:", "or more arms not Tucked.\") any_flipped = not all(self._arm_state['flipped'].values()) if", "tuck_goal = map(diff_check, angles[0:2], self._joint_moves['tuck'][limb][0:2]) if 
all(untuck_goal): self._arm_state['tuck'][limb] = 'untuck'", "False}, 'flipped': {'left': False, 'right': False} } self._joint_moves = {", "not self._arm_state['flipped'][limb]: actions[limb] = 'untuck' disabled[limb] = False self._move_to(actions, disabled)", "Robotics # All rights reserved. # # Redistribution and use", "avoid arm jerking from \"force-field\". head = baxter_interface.Head() start_disabled =", "# modification, are permitted provided that the following conditions are", "'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'left') self._collide_rsub = rospy.Subscriber( 'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState, self._update_collision,", "{'left': 'none', 'right': 'none'}, 'collide': {'left': False, 'right': False}, 'flipped':", "self._move_to(actions, suppress) self._done = True return def clean_shutdown(self): \"\"\"Handles ROS", "self._disable_pub = { 'left': rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance', Empty, queue_size=10), 'right': rospy.Publisher(", "self._done: rospy.logwarn('Aborting: Shutting down safely...') if any(self._arm_state['collide'].values()): while self._rs.state().enabled !=", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING,", "OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT", "of the Rethink Robotics nor the names of its #", "Baxter's arms to/from the shipping pose \"\"\" import argparse from", "conditions and the following disclaimer in the # documentation and/or", "tuck.viewitems()) and not rospy.is_shutdown()): if self._rs.state().enabled == False: self._enable_pub.publish(True) for", "OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,", "return else: rospy.loginfo(\"Tucking: One or more arms not Tucked.\") any_flipped", "not self._done: rospy.logwarn('Aborting: Shutting down safely...') if any(self._arm_state['collide'].values()): while self._rs.state().enabled", "Empty, Bool, ) 
import baxter_interface from baxter_core_msgs.msg import ( CollisionAvoidanceState,", "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS", "LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "= True return else: rospy.loginfo(\"Tucking: One or more arms not", "'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0], 'right': [1.0,", "CollisionAvoidanceState, self._update_collision, 'left') self._collide_rsub = rospy.Subscriber( 'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'right')", "self._tuck == True: # If arms are already tucked, report", ") import baxter_interface from baxter_core_msgs.msg import ( CollisionAvoidanceState, ) from", "angles, self._joint_moves['untuck'][limb]) tuck_goal = map(diff_check, angles[0:2], self._joint_moves['tuck'][limb][0:2]) if all(untuck_goal): self._arm_state['tuck'][limb]", "> 0 self._check_arm_state() def _check_arm_state(self): \"\"\" Check for goals and", "may be used to endorse or promote products derived from", "before enabling robot, to avoid arm jerking from \"force-field\". head", "that the following conditions are met: # # 1. Redistributions", "} self._rs = baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub = rospy.Publisher('robot/set_super_enable', Bool, queue_size=10) def", "self._done = True return # If arms already untucked, move", "rospy.Rate(20.0) # Hz self._tuck_threshold = 0.2 # radians self._peak_angle =", "}, 'untuck': { 'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03,", "head to neutral position\") while not at_goal() and not rospy.is_shutdown():", "{'left': True, 'right': True} self._move_to(actions, disabled) self._done = True return", "def clean_shutdown(self): \"\"\"Handles ROS shutdown (Ctrl-C) safely.\"\"\" if not self._done:", "report this to user and exit. 
if all(self._arm_state['tuck'][limb] == 'tuck'", "False} } self._joint_moves = { 'tuck': { 'left': [-1.0, -2.07,", "dest=\"tuck\", action='store_true', default=False, help=\"tuck arms\") tuck_group.add_argument(\"-u\", \"--untuck\", dest=\"untuck\", action='store_true', default=False,", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED", "or promote products derived from # this software without specific", "self._arm_state['tuck'][limb] = 'none' # Check if shoulder is flipped over", "self._limbs: angles = [self._arms[limb].joint_angle(joint) for joint in self._arms[limb].joint_names()] # Check", "of conditions and the following disclaimer in the # documentation", "OF SUCH DAMAGE. \"\"\" Tool to tuck/untuck Baxter's arms to/from", "copyright notice, # this list of conditions and the following", "baxter_interface import CHECK_VERSION class Tuck(object): def __init__(self, tuck_cmd): self._done =", "if self._tuck == True: # If arms are already tucked,", "def main(): parser = argparse.ArgumentParser() tuck_group = parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument(\"-t\",\"--tuck\", dest=\"tuck\",", "Check for goals and behind collision field. If s1 joint", "start position with collision %s.\", \"on\" if any_flipped else \"off\")", "If arms are already tucked, report this to user and", "SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH", "-1.0, -1.19, 1.94, 0.67, 1.03, -0.50], 'right': [0.08, -1.0, 1.19,", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE", "{'left': False, 'right': False} } self._joint_moves = { 'tuck': {", "the names of its # contributors may be used to", "any(self._arm_state['collide'].values()): self._rs.disable() return def supervised_tuck(self): # Update our starting state", "INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF", "# 3. Neither the name of the Rethink Robotics nor", "conditions and the following disclaimer. # 2. 
Redistributions in binary", "help=\"untuck arms\") args = parser.parse_args(rospy.myargv()[1:]) tuck = args.tuck rospy.loginfo(\"Initializing node...", "return def clean_shutdown(self): \"\"\"Handles ROS shutdown (Ctrl-C) safely.\"\"\" if not", "NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE #", "and the following disclaimer. # 2. Redistributions in binary form", "Bool, queue_size=10) def _update_collision(self, data, limb): self._arm_state['collide'][limb] = len(data.collision_object) >", "more arms Tucked;\" \" Disabling Collision Avoidance and untucking.\") self._check_arm_state()", "'right': False}, 'flipped': {'left': False, 'right': False} } self._joint_moves =", "} self._joint_moves = { 'tuck': { 'left': [-1.0, -2.07, 3.0,", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "tucker = Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown) tucker.supervised_tuck() rospy.loginfo(\"Finished tuck\") if __name__ ==", "promote products derived from # this software without specific prior", "( CollisionAvoidanceState, ) from baxter_interface import CHECK_VERSION class Tuck(object): def", "arm around the head-arm collision force-field. \"\"\" diff_check = lambda", "while (any(self._arm_state['tuck'][limb] != goal for limb, goal in tuck.viewitems()) and", "the following disclaimer. # 2. Redistributions in binary form must", "import baxter_interface from baxter_core_msgs.msg import ( CollisionAvoidanceState, ) from baxter_interface", "} self._tuck = tuck_cmd self._tuck_rate = rospy.Rate(20.0) # Hz self._tuck_threshold", "tucked disable collision and untuck arms if any(self._arm_state['flipped'].values()): rospy.loginfo(\"Untucking: One", "following disclaimer. # 2. 
Redistributions in binary form must reproduce", "= not self._rs.state().enabled at_goal = lambda: (abs(head.pan()) <= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo(\"Moving", "-2.07, -3.0, 2.55, -0.0, 0.01, 0.0] }, 'untuck': { 'left':", "flipped over peak self._arm_state['flipped'][limb] = ( self._arms[limb].joint_angle(limb + '_s1') <=", "# Move to neutral pose before tucking arms to avoid", "Move to neutral pose before tucking arms to avoid damage", "are tucked disable collision and untuck arms if any(self._arm_state['flipped'].values()): rospy.loginfo(\"Untucking:", "return # Untuck Arms else: # If arms are tucked", "this to user and exit. if all(self._arm_state['tuck'][limb] == 'tuck' for", "OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER", "('left', 'right') self._arms = { 'left': baxter_interface.Limb('left'), 'right': baxter_interface.Limb('right'), }", "Arms if self._tuck == True: # If arms are already", "LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "to avoid arm jerking from \"force-field\". 
head = baxter_interface.Head() start_disabled", "baxter_interface.Limb('left'), 'right': baxter_interface.Limb('right'), } self._tuck = tuck_cmd self._tuck_rate = rospy.Rate(20.0)", "\"\"\" import argparse from copy import deepcopy import rospy from", "notice, this list of conditions and the following disclaimer in", "else: rospy.loginfo(\"Untucking: Arms already Untucked;\" \" Moving to neutral position.\")", "'robot/limb/left/suppress_collision_avoidance', Empty, queue_size=10), 'right': rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance', Empty, queue_size=10) } self._rs", "for limb in self._limbs: angles = [self._arms[limb].joint_angle(joint) for joint in", "IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,", "def _move_to(self, tuck, disabled): if any(disabled.values()): [pub.publish(Empty()) for pub in", "self._disable_pub[limb].publish(Empty()) if limb in tuck: self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb]))) self._check_arm_state() self._tuck_rate.sleep()", "collision will need to be disabled to get the arm", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #", "retain the above copyright notice, # this list of conditions", "{ 'tuck': {'left': 'none', 'right': 'none'}, 'collide': {'left': False, 'right':", "self._joint_moves['tuck'][limb][0:2]) if all(untuck_goal): self._arm_state['tuck'][limb] = 'untuck' elif all(tuck_goal): self._arm_state['tuck'][limb] =", "All rights reserved. # # Redistribution and use in source", "arms\") tuck_group.add_argument(\"-u\", \"--untuck\", dest=\"untuck\", action='store_true', default=False, help=\"untuck arms\") args =", "in self._disable_pub.values()] if not self._rs.state().enabled: self._enable_pub.publish(True) head.set_pan(0.0, 0.5, timeout=0) self._tuck_rate.sleep()", "jerking from \"force-field\". 
head = baxter_interface.Head() start_disabled = not self._rs.state().enabled", "and/or other materials provided with the distribution. # 3. Neither", "without # modification, are permitted provided that the following conditions", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS", "OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY", "self._check_arm_state() def _check_arm_state(self): \"\"\" Check for goals and behind collision", "limb in tuck: self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb]))) self._check_arm_state() self._tuck_rate.sleep() if any(self._arm_state['collide'].values()):", "{'left': False, 'right': False}, 'flipped': {'left': False, 'right': False} }", "shipping pose \"\"\" import argparse from copy import deepcopy import", "our starting state to check if arms are tucked self._prepare_to_tuck()", "# notice, this list of conditions and the following disclaimer", "# radians self._peak_angle = -1.6 # radians self._arm_state = {", "self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb]))) self._check_arm_state() self._tuck_rate.sleep() if any(self._arm_state['collide'].values()): self._rs.disable() return def", "the following conditions are met: # # 1. 
Redistributions of", "WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE", "this list of conditions and the following disclaimer in the", "used to endorse or promote products derived from # this", "self._limbs = ('left', 'right') self._arms = { 'left': baxter_interface.Limb('left'), 'right':", "self._check_arm_state() # Tuck Arms if self._tuck == True: # If", "'right': baxter_interface.Limb('right'), } self._tuck = tuck_cmd self._tuck_rate = rospy.Rate(20.0) #", "modification, are permitted provided that the following conditions are met:", "PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT,", "enabling robot, to avoid arm jerking from \"force-field\". head =", "self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb]))) self._check_arm_state() self._tuck_rate.sleep() if any(self._arm_state['collide'].values()): self._rs.disable() return def supervised_tuck(self):", "are tucked self._prepare_to_tuck() self._check_arm_state() # Tuck Arms if self._tuck ==", "dest=\"untuck\", action='store_true', default=False, help=\"untuck arms\") args = parser.parse_args(rospy.myargv()[1:]) tuck =", "and not rospy.is_shutdown(): if start_disabled: [pub.publish(Empty()) for pub in self._disable_pub.values()]", "CollisionAvoidanceState, self._update_collision, 'right') self._disable_pub = { 'left': rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance', Empty,", "self._done = True return # Untuck Arms else: # If", "# ARISING IN ANY WAY OUT OF THE USE OF", "= { 'tuck': { 'left': [-1.0, -2.07, 3.0, 2.55, 0.0,", "_prepare_to_tuck(self): # If arms are in \"tucked\" state, disable collision", "pose before tucking arms to avoid damage self._check_arm_state() actions =", "rospy.is_shutdown()): if self._rs.state().enabled == False: self._enable_pub.publish(True) for limb in self._limbs:", "import argparse from copy import deepcopy import rospy from std_msgs.msg", 
"self._check_arm_state() actions = dict() disabled = {'left': True, 'right': True}", "AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN", "self._done = False self._limbs = ('left', 'right') self._arms = {", "if in a goal position untuck_goal = map(diff_check, angles, self._joint_moves['untuck'][limb])", "Robotics nor the names of its # contributors may be", "def _check_arm_state(self): \"\"\" Check for goals and behind collision field.", "position\") while not at_goal() and not rospy.is_shutdown(): if start_disabled: [pub.publish(Empty())", "reserved. # # Redistribution and use in source and binary", "other materials provided with the distribution. # 3. Neither the", "for limb, goal in tuck.viewitems()) and not rospy.is_shutdown()): if self._rs.state().enabled", "limb in self._limbs: if disabled[limb]: self._disable_pub[limb].publish(Empty()) if limb in tuck:", "'tuck'} disabled = {'left': True, 'right': True} self._move_to(actions, disabled) self._done", "self._rs.state().enabled == True and not rospy.is_shutdown(): [pub.publish(Empty()) for pub in", "= map(diff_check, angles[0:2], self._joint_moves['tuck'][limb][0:2]) if all(untuck_goal): self._arm_state['tuck'][limb] = 'untuck' elif", "ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY,", "arm jerking from \"force-field\". 
head = baxter_interface.Head() start_disabled = not", "in self._limbs: if disabled[limb]: self._disable_pub[limb].publish(Empty()) if limb in tuck: self._arms[limb].set_joint_positions(dict(zip(", "argparse from copy import deepcopy import rospy from std_msgs.msg import", "Disable collision and Tuck Arms rospy.loginfo(\"Tucking: Tucking with collision avoidance", "ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT", "or more arms Tucked;\" \" Disabling Collision Avoidance and untucking.\")", "'right': 'none'}, 'collide': {'left': False, 'right': False}, 'flipped': {'left': False,", "\"Unt\",)) tucker = Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown) tucker.supervised_tuck() rospy.loginfo(\"Finished tuck\") if __name__", "while self._rs.state().enabled != False: [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False)", "shoulder is flipped over peak self._arm_state['flipped'][limb] = ( self._arms[limb].joint_angle(limb +", "if all(untuck_goal): self._arm_state['tuck'][limb] = 'untuck' elif all(tuck_goal): self._arm_state['tuck'][limb] = 'tuck'", "} self._collide_lsub = rospy.Subscriber( 'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'left') self._collide_rsub =", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "if all(self._arm_state['tuck'][limb] == 'tuck' for limb in self._limbs): rospy.loginfo(\"Tucking: Arms", "from baxter_interface import CHECK_VERSION class Tuck(object): def __init__(self, tuck_cmd): self._done", "self._rs.disable() return def supervised_tuck(self): # Update our starting state to", "'right': True} self._move_to(actions, disabled) self._done = True return # Untuck", "\"tucked\" state, disable collision avoidance # before enabling robot, to", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #", "= { 'tuck': {'left': 'none', 'right': 'none'}, 'collide': {'left': False,", "the distribution. # 3. 
Neither the name of the Rethink", "Tucking with collision avoidance off.\") actions = {'left': 'tuck', 'right':", "SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"", "rospy.loginfo(\"Moving head to neutral position\") while not at_goal() and not", "lambda a, b: abs(a - b) <= self._tuck_threshold for limb", "in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def main(): parser = argparse.ArgumentParser() tuck_group", "def __init__(self, tuck_cmd): self._done = False self._limbs = ('left', 'right')", "'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50], 'right': [0.08,", "safely.\"\"\" if not self._done: rospy.logwarn('Aborting: Shutting down safely...') if any(self._arm_state['collide'].values()):", "list of conditions and the following disclaimer. # 2. Redistributions", "and not rospy.is_shutdown()): if self._rs.state().enabled == False: self._enable_pub.publish(True) for limb", "import ( Empty, Bool, ) import baxter_interface from baxter_core_msgs.msg import", "[-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0], 'right': [1.0, -2.07,", "Arms else: # If arms are tucked disable collision and", "position.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck', 'right':", "for joint in self._arms[limb].joint_names()] # Check if in a goal", "actions = {'left': 'untuck', 'right': 'untuck'} self._move_to(actions, suppress) self._done =", "materials provided with the distribution. # 3. Neither the name", "head-arm collision force-field. \"\"\" diff_check = lambda a, b: abs(a", "to get the arm around the head-arm collision force-field. \"\"\"", "'none', 'right': 'none'}, 'collide': {'left': False, 'right': False}, 'flipped': {'left':", "parser.parse_args(rospy.myargv()[1:]) tuck = args.tuck rospy.loginfo(\"Initializing node... 
\") rospy.init_node(\"rsdk_tuck_arms\") rospy.loginfo(\"%sucking arms\"", "1.94, 0.67, 1.03, -0.50], 'right': [0.08, -1.0, 1.19, 1.94, -0.67,", "= {'left': 'tuck', 'right': 'tuck'} disabled = {'left': True, 'right':", "permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "not all(self._arm_state['flipped'].values()) if any_flipped: rospy.loginfo( \"Moving to neutral start position", "if any(self._arm_state['flipped'].values()): rospy.loginfo(\"Untucking: One or more arms Tucked;\" \" Disabling", "False self._limbs = ('left', 'right') self._arms = { 'left': baxter_interface.Limb('left'),", "self._move_to(actions, suppress) self._done = True return # If arms already", "clean_shutdown(self): \"\"\"Handles ROS shutdown (Ctrl-C) safely.\"\"\" if not self._done: rospy.logwarn('Aborting:", "False, 'right': False}, 'flipped': {'left': False, 'right': False} } self._joint_moves", "tucking arms to avoid damage self._check_arm_state() actions = dict() disabled", "Moving to neutral position.\") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions =", "collision %s.\", \"on\" if any_flipped else \"off\") # Move to", "FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT", "the arm around the head-arm collision force-field. \"\"\" diff_check =", "1.94, -0.67, 1.03, 0.50] } } self._collide_lsub = rospy.Subscriber( 'robot/limb/left/collision_avoidance_state',", "\"\"\" Tool to tuck/untuck Baxter's arms to/from the shipping pose", "Rethink Robotics # All rights reserved. # # Redistribution and", "rospy.Publisher('robot/set_super_enable', Bool, queue_size=10) def _update_collision(self, data, limb): self._arm_state['collide'][limb] = len(data.collision_object)", "{'left': 'untuck', 'right': 'untuck'} self._move_to(actions, suppress) self._done = True return", "to endorse or promote products derived from # this software", "provided with the distribution. # 3. 
Neither the name of", "'tuck' else: self._arm_state['tuck'][limb] = 'none' # Check if shoulder is", "self._disable_pub.values()] if not self._rs.state().enabled: self._enable_pub.publish(True) head.set_pan(0.0, 0.5, timeout=0) self._tuck_rate.sleep() if", "# If arms are in \"tucked\" state, disable collision avoidance", "Redistributions in binary form must reproduce the above copyright #", "NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND", "queue_size=10), 'right': rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance', Empty, queue_size=10) } self._rs = baxter_interface.RobotEnable(CHECK_VERSION)", "goals and behind collision field. If s1 joint is over", "[pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def _move_to(self, tuck,", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;", "True return def clean_shutdown(self): \"\"\"Handles ROS shutdown (Ctrl-C) safely.\"\"\" if", "FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO", "for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def main(): parser =", "False: [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def main():", "BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY", "its # contributors may be used to endorse or promote", "self._prepare_to_tuck() self._check_arm_state() # Tuck Arms if self._tuck == True: #", "OF THE # POSSIBILITY OF SUCH DAMAGE. \"\"\" Tool to", "0.0], 'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0] },", "IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "self._disable_pub.values()] while (any(self._arm_state['tuck'][limb] != goal for limb, goal in tuck.viewitems())", "-1.6 # radians self._arm_state = { 'tuck': {'left': 'none', 'right':", "1. 
Redistributions of source code must retain the above copyright", "( Empty, Bool, ) import baxter_interface from baxter_core_msgs.msg import (", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "self._joint_moves['untuck'][limb]) tuck_goal = map(diff_check, angles[0:2], self._joint_moves['tuck'][limb][0:2]) if all(untuck_goal): self._arm_state['tuck'][limb] =", "= parser.parse_args(rospy.myargv()[1:]) tuck = args.tuck rospy.loginfo(\"Initializing node... \") rospy.init_node(\"rsdk_tuck_arms\") rospy.loginfo(\"%sucking", "def supervised_tuck(self): # Update our starting state to check if", "PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE", "(INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT", "of its # contributors may be used to endorse or", "pub in self._disable_pub.values()] while (any(self._arm_state['tuck'][limb] != goal for limb, goal", "== False: self._enable_pub.publish(True) for limb in self._limbs: if disabled[limb]: self._disable_pub[limb].publish(Empty())", "default=False, help=\"untuck arms\") args = parser.parse_args(rospy.myargv()[1:]) tuck = args.tuck rospy.loginfo(\"Initializing", "disable collision and untuck arms if any(self._arm_state['flipped'].values()): rospy.loginfo(\"Untucking: One or", "OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY", "THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "# Copyright (c) 2013-2015, Rethink Robotics # All rights reserved.", "[pub.publish(Empty()) for pub in self._disable_pub.values()] if not self._rs.state().enabled: self._enable_pub.publish(True) head.set_pan(0.0,", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS", "return # If arms already untucked, move to neutral location", "and exit. if all(self._arm_state['tuck'][limb] == 'tuck' for limb in self._limbs):", "with the distribution. # 3. 
Neither the name of the", "self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def _move_to(self, tuck, disabled): if any(disabled.values()): [pub.publish(Empty())", "action='store_true', default=False, help=\"tuck arms\") tuck_group.add_argument(\"-u\", \"--untuck\", dest=\"untuck\", action='store_true', default=False, help=\"untuck", "following disclaimer in the # documentation and/or other materials provided", "'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'right') self._disable_pub = { 'left': rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance',", "with collision avoidance off.\") actions = {'left': 'tuck', 'right': 'tuck'}", "# If arms are tucked disable collision and untuck arms", "INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY", "neutral pose before tucking arms to avoid damage self._check_arm_state() actions", "self._rs.state().enabled: self._enable_pub.publish(True) head.set_pan(0.0, 0.5, timeout=0) self._tuck_rate.sleep() if start_disabled: while self._rs.state().enabled", "THE # POSSIBILITY OF SUCH DAMAGE. \"\"\" Tool to tuck/untuck", "_update_collision(self, data, limb): self._arm_state['collide'][limb] = len(data.collision_object) > 0 self._check_arm_state() def", "angles[0:2], self._joint_moves['tuck'][limb][0:2]) if all(untuck_goal): self._arm_state['tuck'][limb] = 'untuck' elif all(tuck_goal): self._arm_state['tuck'][limb]", "Copyright (c) 2013-2015, Rethink Robotics # All rights reserved. 
#", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #", "derived from # this software without specific prior written permission.", "b: abs(a - b) <= self._tuck_threshold for limb in self._limbs:", "CollisionAvoidanceState, ) from baxter_interface import CHECK_VERSION class Tuck(object): def __init__(self,", "\"\"\" Check for goals and behind collision field. If s1", "OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON", "Tuck(object): def __init__(self, tuck_cmd): self._done = False self._limbs = ('left',", "'untuck', 'right': 'untuck'} self._move_to(actions, suppress) self._done = True return def", "SUCH DAMAGE. \"\"\" Tool to tuck/untuck Baxter's arms to/from the", "2. Redistributions in binary form must reproduce the above copyright", "= parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument(\"-t\",\"--tuck\", dest=\"tuck\", action='store_true', default=False, help=\"tuck arms\") tuck_group.add_argument(\"-u\", \"--untuck\",", "to user and exit. if all(self._arm_state['tuck'][limb] == 'tuck' for limb", "arms to avoid damage self._check_arm_state() actions = dict() disabled =", "Check if shoulder is flipped over peak self._arm_state['flipped'][limb] = (", "must retain the above copyright notice, # this list of", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED.", "OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT", "= not all(self._arm_state['flipped'].values()) if any_flipped: rospy.loginfo( \"Moving to neutral start", "in self._arms[limb].joint_names()] # Check if in a goal position untuck_goal", "(Ctrl-C) safely.\"\"\" if not self._done: rospy.logwarn('Aborting: Shutting down safely...') if", "baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub = rospy.Publisher('robot/set_super_enable', Bool, queue_size=10) def _update_collision(self, data, limb):", "suppress) self._done = True return # If arms already untucked,", "\"force-field\". 
head = baxter_interface.Head() start_disabled = not self._rs.state().enabled at_goal =", "If s1 joint is over the peak, collision will need", "- b) <= self._tuck_threshold for limb in self._limbs: angles =", "self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def main(): parser = argparse.ArgumentParser() tuck_group =", "and the following disclaimer in the # documentation and/or other", "self._arms = { 'left': baxter_interface.Limb('left'), 'right': baxter_interface.Limb('right'), } self._tuck =", "queue_size=10) def _update_collision(self, data, limb): self._arm_state['collide'][limb] = len(data.collision_object) > 0", "TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF", "exit. if all(self._arm_state['tuck'][limb] == 'tuck' for limb in self._limbs): rospy.loginfo(\"Tucking:", "tuck_group.add_argument(\"-u\", \"--untuck\", dest=\"untuck\", action='store_true', default=False, help=\"untuck arms\") args = parser.parse_args(rospy.myargv()[1:])", "'_s1') <= self._peak_angle) def _prepare_to_tuck(self): # If arms are in", "'untuck': { 'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50],", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND", "= baxter_interface.Head() start_disabled = not self._rs.state().enabled at_goal = lambda: (abs(head.pan())", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY", "in 'Tucked' position.\") self._done = True return else: rospy.loginfo(\"Tucking: One", "# 2. Redistributions in binary form must reproduce the above", "ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR" ]
[ "<filename>django-system/src/tsm_api/serializers.py from rest_framework import serializers from .models import Measurement class", "import Measurement class MeasurementSerializer(serializers.ModelSerializer): class Meta: model = Measurement fields", "from rest_framework import serializers from .models import Measurement class MeasurementSerializer(serializers.ModelSerializer):", "class MeasurementSerializer(serializers.ModelSerializer): class Meta: model = Measurement fields = '__all__'", "rest_framework import serializers from .models import Measurement class MeasurementSerializer(serializers.ModelSerializer): class", "serializers from .models import Measurement class MeasurementSerializer(serializers.ModelSerializer): class Meta: model", "Measurement class MeasurementSerializer(serializers.ModelSerializer): class Meta: model = Measurement fields =", "import serializers from .models import Measurement class MeasurementSerializer(serializers.ModelSerializer): class Meta:", "from .models import Measurement class MeasurementSerializer(serializers.ModelSerializer): class Meta: model =", ".models import Measurement class MeasurementSerializer(serializers.ModelSerializer): class Meta: model = Measurement" ]
[]
[ "class Solution(object): def nextPermutation(self, nums): \"\"\" :type nums: List[int] :rtype:", "n if t == 0: nums[:] = nums[::-1] return x", "= nums[n-1] while t < len(nums) and x < nums[t]:", "<reponame>tedye/leetcode<gh_stars>1-10 class Solution(object): def nextPermutation(self, nums): \"\"\" :type nums: List[int]", "while t < len(nums) and x < nums[t]: t +=", "return n = len(nums)-1 while n > 0 and nums[n-1]", "x < nums[t]: t += 1 temp = nums[t-1] nums[t-1]", "= nums[t-1] nums[t-1] = nums[n-1] nums[n-1] = temp nums[n:] =", "= len(nums)-1 while n > 0 and nums[n-1] >= nums[n]:", "and nums[n-1] >= nums[n]: n -= 1 t = n", "instead. \"\"\" if not nums: return n = len(nums)-1 while", "x = nums[n-1] while t < len(nums) and x <", "nums: return n = len(nums)-1 while n > 0 and", "in-place instead. \"\"\" if not nums: return n = len(nums)-1", "anything, modify nums in-place instead. \"\"\" if not nums: return", "nextPermutation(self, nums): \"\"\" :type nums: List[int] :rtype: void Do not", "t == 0: nums[:] = nums[::-1] return x = nums[n-1]", "= nums[::-1] return x = nums[n-1] while t < len(nums)", "nums in-place instead. 
\"\"\" if not nums: return n =", "t < len(nums) and x < nums[t]: t += 1", "< nums[t]: t += 1 temp = nums[t-1] nums[t-1] =", ":type nums: List[int] :rtype: void Do not return anything, modify", "\"\"\" if not nums: return n = len(nums)-1 while n", "t += 1 temp = nums[t-1] nums[t-1] = nums[n-1] nums[n-1]", "temp = nums[t-1] nums[t-1] = nums[n-1] nums[n-1] = temp nums[n:]", ":rtype: void Do not return anything, modify nums in-place instead.", "while n > 0 and nums[n-1] >= nums[n]: n -=", "if t == 0: nums[:] = nums[::-1] return x =", "0: nums[:] = nums[::-1] return x = nums[n-1] while t", "nums[t-1] nums[t-1] = nums[n-1] nums[n-1] = temp nums[n:] = nums[n:][::-1]", "0 and nums[n-1] >= nums[n]: n -= 1 t =", "nums[::-1] return x = nums[n-1] while t < len(nums) and", "== 0: nums[:] = nums[::-1] return x = nums[n-1] while", "not nums: return n = len(nums)-1 while n > 0", "+= 1 temp = nums[t-1] nums[t-1] = nums[n-1] nums[n-1] =", "void Do not return anything, modify nums in-place instead. \"\"\"", "modify nums in-place instead. \"\"\" if not nums: return n", "nums): \"\"\" :type nums: List[int] :rtype: void Do not return", "= n if t == 0: nums[:] = nums[::-1] return", "len(nums) and x < nums[t]: t += 1 temp =", "return anything, modify nums in-place instead. \"\"\" if not nums:", "and x < nums[t]: t += 1 temp = nums[t-1]", "nums[n-1] >= nums[n]: n -= 1 t = n if", "def nextPermutation(self, nums): \"\"\" :type nums: List[int] :rtype: void Do", "len(nums)-1 while n > 0 and nums[n-1] >= nums[n]: n", "> 0 and nums[n-1] >= nums[n]: n -= 1 t", "n = len(nums)-1 while n > 0 and nums[n-1] >=", "< len(nums) and x < nums[t]: t += 1 temp", "nums[n]: n -= 1 t = n if t ==", "1 t = n if t == 0: nums[:] =", "List[int] :rtype: void Do not return anything, modify nums in-place", "not return anything, modify nums in-place instead. 
\"\"\" if not", "t = n if t == 0: nums[:] = nums[::-1]", ">= nums[n]: n -= 1 t = n if t", "return x = nums[n-1] while t < len(nums) and x", "n -= 1 t = n if t == 0:", "if not nums: return n = len(nums)-1 while n >", "nums[t-1] = nums[n-1] nums[n-1] = temp nums[n:] = nums[n:][::-1] return", "nums[t]: t += 1 temp = nums[t-1] nums[t-1] = nums[n-1]", "1 temp = nums[t-1] nums[t-1] = nums[n-1] nums[n-1] = temp", "Do not return anything, modify nums in-place instead. \"\"\" if", "nums[:] = nums[::-1] return x = nums[n-1] while t <", "nums: List[int] :rtype: void Do not return anything, modify nums", "-= 1 t = n if t == 0: nums[:]", "Solution(object): def nextPermutation(self, nums): \"\"\" :type nums: List[int] :rtype: void", "nums[n-1] while t < len(nums) and x < nums[t]: t", "\"\"\" :type nums: List[int] :rtype: void Do not return anything,", "n > 0 and nums[n-1] >= nums[n]: n -= 1" ]